ngram
listlengths
0
82k
[ "default=480, type=int) args = parser.parse_args() mkdir(args.dest_folder) if (args.width % 32", "Path def _extract_frames(video_path, parent, start=0, sampling_f=1): vidcap = cv2.VideoCapture(video_path) success,", "image) saved += 1 success, image = vidcap.read() # read", "with its name. ''') parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/', help='''Path where to", "files with extension defined in --extension and save them under", "dest_folder. The frames from each video will be saved under", "parser.add_argument('--sampling', help='how many fps', default='3') parser.add_argument('--run-type', help='train or test', default='train')", "default='./dataset/', help='''Path where to store frames. NB all files in", "cv2 import argparse from utils import * from tqdm import", "from a given video') parser.add_argument('-input', dest=\"input\", required=True, help='''Path to a", "True if you want to save the frames of all", "saved += 1 success, image = vidcap.read() # read next", "type=int) args = parser.parse_args() mkdir(args.dest_folder) if (args.width % 32 !=", "to a single video or a folder. If path to", "\"frame dataset\" from a given video') parser.add_argument('-input', dest=\"input\", required=True, help='''Path", "folder or path to {args.extension} file only') if args.same_folder: start", "if not videos: raise Exception(f'No {args.extension} files in input directory", "folder the algorithm will extract frames from all files with", "args.same_folder: start = 0 dest_folder = str(Path(args.dest_folder) / f'{args.run_type}_frames') mkdir(dest_folder)", "= 0 dest_folder = str(Path(args.dest_folder) / f'{args.run_type}_frames') mkdir(dest_folder) for v", "+= 1 success, image = vidcap.read() # read next print(f'Successfully", "be saved under a folder with its name. ''') parser.add_argument('--dest-folder',", "to store frames. 
NB all files in this folder will", "!= args.extension: raise ValueError(f'Correct inputs: folder or path to {args.extension}", "% 300 == 0: print('Processing frame: ', count) if count", "if count % 300 == 0: print('Processing frame: ', count)", "(args.width % 32 != 0) or (args.height % 32 !=", "default='mp4') parser.add_argument('-width', help='output width', default=640, type=int) parser.add_argument('-height', help='output height', default=480,", "videos = [v for v in glob(inp)] if not videos:", "1 success, image = vidcap.read() # read next print(f'Successfully saved", "vidcap.read() # read next print(f'Successfully saved {saved} frames to {dest_folder}')", "each video will be saved under a folder with its", "divisible by 32\") if os.path.isdir(args.input): inp = str(Path(args.input) / f'*.{args.extension}')", "from all files with extension defined in --extension and save", "= parser.parse_args() mkdir(args.dest_folder) if (args.width % 32 != 0) or", "height that are divisible by 32\") if os.path.isdir(args.input): inp =", "start parser = argparse.ArgumentParser( description='build a \"frame dataset\" from a", "inp = str(Path(args.input) / f'*.{args.extension}') videos = [v for v", "width and height that are divisible by 32\") if os.path.isdir(args.input):", "[v for v in glob(inp)] if not videos: raise Exception(f'No", "If True frames will be saved in dest_folder/frames.''') parser.add_argument('--sampling', help='how", "image = success, image = vidcap.read() count = -1 saved", "argparse from utils import * from tqdm import tqdm from", "will be removed before adding the new frames''') parser.add_argument('--same-folder', dest=\"same_folder\",", "_, ext = get_filename_extension(args.input) if ext != args.extension: raise ValueError(f'Correct", "help='train or test', default='train') parser.add_argument('--extension', help='avi, mp4, mov...', default='mp4') parser.add_argument('-width',", "pathlib import Path def _extract_frames(video_path, parent, start=0, 
sampling_f=1): vidcap =", "parser.add_argument('--same-folder', dest=\"same_folder\", default=False, help='''Set it to True if you want", "count) if count % sampling_f == 0: # sampling cv2.imwrite(''.join([dest_folder,", "to the same folder in ascending order going from the", "last frame of the last video. If True frames will", "in --extension and save them under separate folders under dest_folder.", "the frames of all videos to the same folder in", "+ start}.jpg\"]), image) saved += 1 success, image = vidcap.read()", "in dest_folder/frames.''') parser.add_argument('--sampling', help='how many fps', default='3') parser.add_argument('--run-type', help='train or", "= str(Path(args.input) / f'*.{args.extension}') videos = [v for v in", "0 dest_folder = str(Path(args.dest_folder) / f'{args.run_type}_frames') mkdir(dest_folder) for v in", "if not args.same_folder: start = 0 name, _ = get_filename_extension(v)", "folder in ascending order going from the first frame of", "in tqdm(videos): if not args.same_folder: start = 0 name, _", "to the last frame of the last video. 
If True", "str(Path(args.dest_folder) / name) mkdir(dest_folder) start = _extract_frames(v, dest_folder, start, sampling_f=int(args.sampling))", "be saved in dest_folder/frames.''') parser.add_argument('--sampling', help='how many fps', default='3') parser.add_argument('--run-type',", "help='''Set it to True if you want to save the", "frames from all files with extension defined in --extension and", "help='avi, mp4, mov...', default='mp4') parser.add_argument('-width', help='output width', default=640, type=int) parser.add_argument('-height',", "sampling_f == 0: # sampling cv2.imwrite(''.join([dest_folder, f\"/{count + start}.jpg\"]), image)", "from pathlib import Path def _extract_frames(video_path, parent, start=0, sampling_f=1): vidcap", "before adding the new frames''') parser.add_argument('--same-folder', dest=\"same_folder\", default=False, help='''Set it", "frames from each video will be saved under a folder", "= str(Path(args.dest_folder) / f'{args.run_type}_frames') mkdir(dest_folder) for v in tqdm(videos): if", "0 print(f'Processing: {video_path}') while success: count += 1 if count", "with extension defined in --extension and save them under separate", "ext = get_filename_extension(args.input) if ext != args.extension: raise ValueError(f'Correct inputs:", "import glob from pathlib import Path def _extract_frames(video_path, parent, start=0,", "sampling_f=1): vidcap = cv2.VideoCapture(video_path) success, image = success, image =", "input directory {args.input}') elif os.path.isfile(args.input): _, ext = get_filename_extension(args.input) if", "= -1 saved = 0 print(f'Processing: {video_path}') while success: count", "print(f'Successfully saved {saved} frames to {dest_folder}') return count + start", "(args.height % 32 != 0): raise Exception(\"Please use width and", "start = 0 dest_folder = str(Path(args.dest_folder) / f'{args.run_type}_frames') mkdir(dest_folder) for", "them under separate folders under dest_folder. 
The frames from each", "= cv2.VideoCapture(video_path) success, image = success, image = vidcap.read() count", "import os import cv2 import argparse from utils import *", "{args.extension} file only') videos = [args.input] else: raise ValueError(f'Correct inputs:", "= 0 name, _ = get_filename_extension(v) dest_folder = str(Path(args.dest_folder) /", "!= 0): raise Exception(\"Please use width and height that are", "folder or path to {args.extension} file only') videos = [args.input]", "count += 1 if count % 300 == 0: print('Processing", "folder will be removed before adding the new frames''') parser.add_argument('--same-folder',", "dest_folder = str(Path(args.dest_folder) / name) mkdir(dest_folder) start = _extract_frames(v, dest_folder,", "in this folder will be removed before adding the new", "first video to the last frame of the last video.", "glob from pathlib import Path def _extract_frames(video_path, parent, start=0, sampling_f=1):", "print('Processing frame: ', count) if count % sampling_f == 0:", "ValueError(f'Correct inputs: folder or path to {args.extension} file only') if", "of the first video to the last frame of the", "start=0, sampling_f=1): vidcap = cv2.VideoCapture(video_path) success, image = success, image", "True frames will be saved in dest_folder/frames.''') parser.add_argument('--sampling', help='how many", "# sampling cv2.imwrite(''.join([dest_folder, f\"/{count + start}.jpg\"]), image) saved += 1", "first frame of the first video to the last frame", "use width and height that are divisible by 32\") if", "new frames''') parser.add_argument('--same-folder', dest=\"same_folder\", default=False, help='''Set it to True if", "be removed before adding the new frames''') parser.add_argument('--same-folder', dest=\"same_folder\", default=False,", "The frames from each video will be saved under a", "[args.input] else: raise ValueError(f'Correct inputs: folder or path to {args.extension}", "0: print('Processing frame: ', count) if count % sampling_f 
==", "under a folder with its name. ''') parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/',", "% sampling_f == 0: # sampling cv2.imwrite(''.join([dest_folder, f\"/{count + start}.jpg\"]),", "store frames. NB all files in this folder will be", "parser = argparse.ArgumentParser( description='build a \"frame dataset\" from a given", "if ext != args.extension: raise ValueError(f'Correct inputs: folder or path", "inputs: folder or path to {args.extension} file only') if args.same_folder:", "from tqdm import tqdm from glob import glob from pathlib", "and save them under separate folders under dest_folder. The frames", "frames. NB all files in this folder will be removed", "help='how many fps', default='3') parser.add_argument('--run-type', help='train or test', default='train') parser.add_argument('--extension',", "help='''Path where to store frames. NB all files in this", "frame of the last video. If True frames will be", "in glob(inp)] if not videos: raise Exception(f'No {args.extension} files in", "path to {args.extension} file only') if args.same_folder: start = 0", "== 0: # sampling cv2.imwrite(''.join([dest_folder, f\"/{count + start}.jpg\"]), image) saved", "only') videos = [args.input] else: raise ValueError(f'Correct inputs: folder or", "test', default='train') parser.add_argument('--extension', help='avi, mp4, mov...', default='mp4') parser.add_argument('-width', help='output width',", "/ f'*.{args.extension}') videos = [v for v in glob(inp)] if", "that are divisible by 32\") if os.path.isdir(args.input): inp = str(Path(args.input)", "a folder with its name. 
''') parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/', help='''Path", "success, image = success, image = vidcap.read() count = -1", "0 name, _ = get_filename_extension(v) dest_folder = str(Path(args.dest_folder) / name)", "32 != 0): raise Exception(\"Please use width and height that", "from glob import glob from pathlib import Path def _extract_frames(video_path,", "% 32 != 0) or (args.height % 32 != 0):", "raise Exception(f'No {args.extension} files in input directory {args.input}') elif os.path.isfile(args.input):", "dest=\"same_folder\", default=False, help='''Set it to True if you want to", "glob(inp)] if not videos: raise Exception(f'No {args.extension} files in input", "order going from the first frame of the first video", "tqdm(videos): if not args.same_folder: start = 0 name, _ =", "ascending order going from the first frame of the first", "v in glob(inp)] if not videos: raise Exception(f'No {args.extension} files", "will extract frames from all files with extension defined in", "its name. ''') parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/', help='''Path where to store", "frame: ', count) if count % sampling_f == 0: #", "save the frames of all videos to the same folder", "all files in this folder will be removed before adding", "the last video. 
If True frames will be saved in", "/ f'{args.run_type}_frames') mkdir(dest_folder) for v in tqdm(videos): if not args.same_folder:", "str(Path(args.dest_folder) / f'{args.run_type}_frames') mkdir(dest_folder) for v in tqdm(videos): if not", "algorithm will extract frames from all files with extension defined", "{saved} frames to {dest_folder}') return count + start parser =", "= argparse.ArgumentParser( description='build a \"frame dataset\" from a given video')", "get_filename_extension(args.input) if ext != args.extension: raise ValueError(f'Correct inputs: folder or", "Exception(f'No {args.extension} files in input directory {args.input}') elif os.path.isfile(args.input): _,", "under separate folders under dest_folder. The frames from each video", "success: count += 1 if count % 300 == 0:", "a \"frame dataset\" from a given video') parser.add_argument('-input', dest=\"input\", required=True,", "file only') videos = [args.input] else: raise ValueError(f'Correct inputs: folder", "raise ValueError(f'Correct inputs: folder or path to {args.extension} file only')", "a given video') parser.add_argument('-input', dest=\"input\", required=True, help='''Path to a single", "will be saved under a folder with its name. 
''')", "to {args.extension} file only') if args.same_folder: start = 0 dest_folder", "same folder in ascending order going from the first frame", "os.path.isdir(args.input): inp = str(Path(args.input) / f'*.{args.extension}') videos = [v for", "frames to {dest_folder}') return count + start parser = argparse.ArgumentParser(", "-1 saved = 0 print(f'Processing: {video_path}') while success: count +=", "files in this folder will be removed before adding the", "NB all files in this folder will be removed before", "or test', default='train') parser.add_argument('--extension', help='avi, mp4, mov...', default='mp4') parser.add_argument('-width', help='output", "str(Path(args.input) / f'*.{args.extension}') videos = [v for v in glob(inp)]", "v in tqdm(videos): if not args.same_folder: start = 0 name,", "+ start parser = argparse.ArgumentParser( description='build a \"frame dataset\" from", "and height that are divisible by 32\") if os.path.isdir(args.input): inp", "= [args.input] else: raise ValueError(f'Correct inputs: folder or path to", "videos to the same folder in ascending order going from", "mkdir(dest_folder) for v in tqdm(videos): if not args.same_folder: start =", "args.same_folder: start = 0 name, _ = get_filename_extension(v) dest_folder =", "path to folder the algorithm will extract frames from all", "saved under a folder with its name. ''') parser.add_argument('--dest-folder', dest=\"dest_folder\",", "elif os.path.isfile(args.input): _, ext = get_filename_extension(args.input) if ext != args.extension:", "parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/', help='''Path where to store frames. NB all", "= success, image = vidcap.read() count = -1 saved =", "folders under dest_folder. 
The frames from each video will be", "parser.parse_args() mkdir(args.dest_folder) if (args.width % 32 != 0) or (args.height", "the new frames''') parser.add_argument('--same-folder', dest=\"same_folder\", default=False, help='''Set it to True", "from utils import * from tqdm import tqdm from glob", "if os.path.isdir(args.input): inp = str(Path(args.input) / f'*.{args.extension}') videos = [v", "_extract_frames(video_path, parent, start=0, sampling_f=1): vidcap = cv2.VideoCapture(video_path) success, image =", "going from the first frame of the first video to", "from the first frame of the first video to the", "count + start parser = argparse.ArgumentParser( description='build a \"frame dataset\"", "all files with extension defined in --extension and save them", "tqdm import tqdm from glob import glob from pathlib import", "== 0: print('Processing frame: ', count) if count % sampling_f", "under dest_folder. The frames from each video will be saved", "_ = get_filename_extension(v) dest_folder = str(Path(args.dest_folder) / name) mkdir(dest_folder) start", "success, image = vidcap.read() count = -1 saved = 0", "os import cv2 import argparse from utils import * from", "if args.same_folder: start = 0 dest_folder = str(Path(args.dest_folder) / f'{args.run_type}_frames')", "video to the last frame of the last video. If", "vidcap.read() count = -1 saved = 0 print(f'Processing: {video_path}') while", "= [v for v in glob(inp)] if not videos: raise", "inputs: folder or path to {args.extension} file only') videos =", "saved = 0 print(f'Processing: {video_path}') while success: count += 1", "parser.add_argument('--extension', help='avi, mp4, mov...', default='mp4') parser.add_argument('-width', help='output width', default=640, type=int)", "import * from tqdm import tqdm from glob import glob", "image = vidcap.read() # read next print(f'Successfully saved {saved} frames", "default=False, help='''Set it to True if you want to save", "video or a folder. 
If path to folder the algorithm", "read next print(f'Successfully saved {saved} frames to {dest_folder}') return count", "parser.add_argument('-input', dest=\"input\", required=True, help='''Path to a single video or a", "If path to folder the algorithm will extract frames from", "type=int) parser.add_argument('-height', help='output height', default=480, type=int) args = parser.parse_args() mkdir(args.dest_folder)", "{args.extension} files in input directory {args.input}') elif os.path.isfile(args.input): _, ext", "will be saved in dest_folder/frames.''') parser.add_argument('--sampling', help='how many fps', default='3')", "= 0 print(f'Processing: {video_path}') while success: count += 1 if", "frames will be saved in dest_folder/frames.''') parser.add_argument('--sampling', help='how many fps',", "+= 1 if count % 300 == 0: print('Processing frame:", "= str(Path(args.dest_folder) / name) mkdir(dest_folder) start = _extract_frames(v, dest_folder, start,", "start = 0 name, _ = get_filename_extension(v) dest_folder = str(Path(args.dest_folder)", "frame of the first video to the last frame of", "if (args.width % 32 != 0) or (args.height % 32", "the first video to the last frame of the last", "where to store frames. 
NB all files in this folder", "to True if you want to save the frames of", "extension defined in --extension and save them under separate folders", "def _extract_frames(video_path, parent, start=0, sampling_f=1): vidcap = cv2.VideoCapture(video_path) success, image", "ext != args.extension: raise ValueError(f'Correct inputs: folder or path to", "many fps', default='3') parser.add_argument('--run-type', help='train or test', default='train') parser.add_argument('--extension', help='avi,", "{args.extension} file only') if args.same_folder: start = 0 dest_folder =", "frames''') parser.add_argument('--same-folder', dest=\"same_folder\", default=False, help='''Set it to True if you", "parent, start=0, sampling_f=1): vidcap = cv2.VideoCapture(video_path) success, image = success,", "only') if args.same_folder: start = 0 dest_folder = str(Path(args.dest_folder) /", "default='train') parser.add_argument('--extension', help='avi, mp4, mov...', default='mp4') parser.add_argument('-width', help='output width', default=640,", "parser.add_argument('-width', help='output width', default=640, type=int) parser.add_argument('-height', help='output height', default=480, type=int)", "raise Exception(\"Please use width and height that are divisible by", "files in input directory {args.input}') elif os.path.isfile(args.input): _, ext =", "height', default=480, type=int) args = parser.parse_args() mkdir(args.dest_folder) if (args.width %", "sampling cv2.imwrite(''.join([dest_folder, f\"/{count + start}.jpg\"]), image) saved += 1 success,", "removed before adding the new frames''') parser.add_argument('--same-folder', dest=\"same_folder\", default=False, help='''Set", "dest=\"input\", required=True, help='''Path to a single video or a folder.", "Exception(\"Please use width and height that are divisible by 32\")", "1 if count % 300 == 0: print('Processing frame: ',", "import argparse from utils import * from tqdm import tqdm", "the first frame of the first video to the last", "import cv2 
import argparse from utils import * from tqdm", "saved {saved} frames to {dest_folder}') return count + start parser", "import tqdm from glob import glob from pathlib import Path", "<filename>Unsupervised/pix2pixHD/extract_frames.py import os import cv2 import argparse from utils import", "import Path def _extract_frames(video_path, parent, start=0, sampling_f=1): vidcap = cv2.VideoCapture(video_path)", "name, _ = get_filename_extension(v) dest_folder = str(Path(args.dest_folder) / name) mkdir(dest_folder)", "folder with its name. ''') parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/', help='''Path where", "to {args.extension} file only') videos = [args.input] else: raise ValueError(f'Correct", "dataset\" from a given video') parser.add_argument('-input', dest=\"input\", required=True, help='''Path to", "folder. If path to folder the algorithm will extract frames", "video. If True frames will be saved in dest_folder/frames.''') parser.add_argument('--sampling',", "for v in tqdm(videos): if not args.same_folder: start = 0", "it to True if you want to save the frames", "count % 300 == 0: print('Processing frame: ', count) if", "the same folder in ascending order going from the first", "help='''Path to a single video or a folder. If path", "help='output width', default=640, type=int) parser.add_argument('-height', help='output height', default=480, type=int) args", "0): raise Exception(\"Please use width and height that are divisible", "name. 
''') parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/', help='''Path where to store frames.", "ValueError(f'Correct inputs: folder or path to {args.extension} file only') videos", "cv2.imwrite(''.join([dest_folder, f\"/{count + start}.jpg\"]), image) saved += 1 success, image", "in input directory {args.input}') elif os.path.isfile(args.input): _, ext = get_filename_extension(args.input)", "= vidcap.read() count = -1 saved = 0 print(f'Processing: {video_path}')", "cv2.VideoCapture(video_path) success, image = success, image = vidcap.read() count =", "or a folder. If path to folder the algorithm will", "description='build a \"frame dataset\" from a given video') parser.add_argument('-input', dest=\"input\",", "next print(f'Successfully saved {saved} frames to {dest_folder}') return count +", "frames of all videos to the same folder in ascending", "default=640, type=int) parser.add_argument('-height', help='output height', default=480, type=int) args = parser.parse_args()", "the last frame of the last video. If True frames", "single video or a folder. If path to folder the", "or path to {args.extension} file only') videos = [args.input] else:", "= get_filename_extension(args.input) if ext != args.extension: raise ValueError(f'Correct inputs: folder", "return count + start parser = argparse.ArgumentParser( description='build a \"frame", "f'*.{args.extension}') videos = [v for v in glob(inp)] if not", "300 == 0: print('Processing frame: ', count) if count %", "videos = [args.input] else: raise ValueError(f'Correct inputs: folder or path", "--extension and save them under separate folders under dest_folder. 
The", "videos: raise Exception(f'No {args.extension} files in input directory {args.input}') elif", "while success: count += 1 if count % 300 ==", "', count) if count % sampling_f == 0: # sampling", "default='3') parser.add_argument('--run-type', help='train or test', default='train') parser.add_argument('--extension', help='avi, mp4, mov...',", "glob import glob from pathlib import Path def _extract_frames(video_path, parent,", "separate folders under dest_folder. The frames from each video will", "{args.input}') elif os.path.isfile(args.input): _, ext = get_filename_extension(args.input) if ext !=", "adding the new frames''') parser.add_argument('--same-folder', dest=\"same_folder\", default=False, help='''Set it to", "you want to save the frames of all videos to", "or path to {args.extension} file only') if args.same_folder: start =", "args = parser.parse_args() mkdir(args.dest_folder) if (args.width % 32 != 0)", "= get_filename_extension(v) dest_folder = str(Path(args.dest_folder) / name) mkdir(dest_folder) start =", "of all videos to the same folder in ascending order", "mov...', default='mp4') parser.add_argument('-width', help='output width', default=640, type=int) parser.add_argument('-height', help='output height',", "get_filename_extension(v) dest_folder = str(Path(args.dest_folder) / name) mkdir(dest_folder) start = _extract_frames(v,", "args.extension: raise ValueError(f'Correct inputs: folder or path to {args.extension} file", "to {dest_folder}') return count + start parser = argparse.ArgumentParser( description='build", "0) or (args.height % 32 != 0): raise Exception(\"Please use", "given video') parser.add_argument('-input', dest=\"input\", required=True, help='''Path to a single video", "{dest_folder}') return count + start parser = argparse.ArgumentParser( description='build a", "''') parser.add_argument('--dest-folder', dest=\"dest_folder\", default='./dataset/', help='''Path where to store frames. 
NB", "by 32\") if os.path.isdir(args.input): inp = str(Path(args.input) / f'*.{args.extension}') videos", "mkdir(args.dest_folder) if (args.width % 32 != 0) or (args.height %", "# read next print(f'Successfully saved {saved} frames to {dest_folder}') return", "if you want to save the frames of all videos", "width', default=640, type=int) parser.add_argument('-height', help='output height', default=480, type=int) args =", "32 != 0) or (args.height % 32 != 0): raise", "a folder. If path to folder the algorithm will extract", "if count % sampling_f == 0: # sampling cv2.imwrite(''.join([dest_folder, f\"/{count", "mp4, mov...', default='mp4') parser.add_argument('-width', help='output width', default=640, type=int) parser.add_argument('-height', help='output", "* from tqdm import tqdm from glob import glob from", "argparse.ArgumentParser( description='build a \"frame dataset\" from a given video') parser.add_argument('-input',", "the algorithm will extract frames from all files with extension", "utils import * from tqdm import tqdm from glob import", "dest=\"dest_folder\", default='./dataset/', help='''Path where to store frames. NB all files", "0: # sampling cv2.imwrite(''.join([dest_folder, f\"/{count + start}.jpg\"]), image) saved +=", "saved in dest_folder/frames.''') parser.add_argument('--sampling', help='how many fps', default='3') parser.add_argument('--run-type', help='train", "from each video will be saved under a folder with", "to folder the algorithm will extract frames from all files", "last video. If True frames will be saved in dest_folder/frames.''')", "of the last video. 
If True frames will be saved", "f\"/{count + start}.jpg\"]), image) saved += 1 success, image =", "32\") if os.path.isdir(args.input): inp = str(Path(args.input) / f'*.{args.extension}') videos =", "for v in glob(inp)] if not videos: raise Exception(f'No {args.extension}", "want to save the frames of all videos to the", "print(f'Processing: {video_path}') while success: count += 1 if count %", "extract frames from all files with extension defined in --extension", "video will be saved under a folder with its name.", "help='output height', default=480, type=int) args = parser.parse_args() mkdir(args.dest_folder) if (args.width", "save them under separate folders under dest_folder. The frames from", "path to {args.extension} file only') videos = [args.input] else: raise", "video') parser.add_argument('-input', dest=\"input\", required=True, help='''Path to a single video or", "success, image = vidcap.read() # read next print(f'Successfully saved {saved}", "required=True, help='''Path to a single video or a folder. If", "not args.same_folder: start = 0 name, _ = get_filename_extension(v) dest_folder", "dest_folder/frames.''') parser.add_argument('--sampling', help='how many fps', default='3') parser.add_argument('--run-type', help='train or test',", "this folder will be removed before adding the new frames''')", "% 32 != 0): raise Exception(\"Please use width and height", "os.path.isfile(args.input): _, ext = get_filename_extension(args.input) if ext != args.extension: raise", "all videos to the same folder in ascending order going", "a single video or a folder. 
If path to folder", "= vidcap.read() # read next print(f'Successfully saved {saved} frames to", "f'{args.run_type}_frames') mkdir(dest_folder) for v in tqdm(videos): if not args.same_folder: start", "file only') if args.same_folder: start = 0 dest_folder = str(Path(args.dest_folder)", "tqdm from glob import glob from pathlib import Path def", "count = -1 saved = 0 print(f'Processing: {video_path}') while success:", "defined in --extension and save them under separate folders under", "vidcap = cv2.VideoCapture(video_path) success, image = success, image = vidcap.read()", "count % sampling_f == 0: # sampling cv2.imwrite(''.join([dest_folder, f\"/{count +", "are divisible by 32\") if os.path.isdir(args.input): inp = str(Path(args.input) /", "!= 0) or (args.height % 32 != 0): raise Exception(\"Please", "or (args.height % 32 != 0): raise Exception(\"Please use width", "image = vidcap.read() count = -1 saved = 0 print(f'Processing:", "else: raise ValueError(f'Correct inputs: folder or path to {args.extension} file", "in ascending order going from the first frame of the", "directory {args.input}') elif os.path.isfile(args.input): _, ext = get_filename_extension(args.input) if ext", "parser.add_argument('--run-type', help='train or test', default='train') parser.add_argument('--extension', help='avi, mp4, mov...', default='mp4')", "dest_folder = str(Path(args.dest_folder) / f'{args.run_type}_frames') mkdir(dest_folder) for v in tqdm(videos):", "start}.jpg\"]), image) saved += 1 success, image = vidcap.read() #", "to save the frames of all videos to the same", "{video_path}') while success: count += 1 if count % 300", "not videos: raise Exception(f'No {args.extension} files in input directory {args.input}')", "fps', default='3') parser.add_argument('--run-type', help='train or test', default='train') parser.add_argument('--extension', help='avi, mp4,", "parser.add_argument('-height', help='output height', default=480, type=int) args = parser.parse_args() 
mkdir(args.dest_folder) if" ]
[ "using utils headers = self.utils.getHeaders(responseInfo) # overwrite the Content-Type header.", "class BurpExtender(IBurpExtender, IHttpListener): # implement IBurpExtender # set everything up", "register an HTTP listener callbacks.registerHttpListener(self) # # implement IHttpListener #", "callbacks.registerHttpListener(self) # # implement IHttpListener # def processHttpMessage(self, toolFlag, messageIsRequest,", "adds the header if it # does not exist. headers.overwrite(\"Content-Type\",", "def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo): # only process responses if", "does not exist. headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\") # put everything back", "a directory and add filter-options.py to Burp. Nees Jython. #", "import FixBurpExceptions import sys except ImportError: pass # support for", "responses if messageIsRequest: return # now we only have responses", "directory and add filter-options.py to Burp. Nees Jython. # Blog", "burp-exceptions try: sys.stdout = callbacks.getStdout() except: pass # set our", "redirector\" # https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage: Put both", "processHttpMessage(self, toolFlag, messageIsRequest, messageInfo): # only process responses if messageIsRequest:", "pass # set our extension name callbacks.setExtensionName(\"Filter OPTIONS\") # register", "back together bodyBytes = self.utils.getBody(messageIsRequest, messageInfo) # Debug # rawHeaders", "# return if the request method was not OPTIONS if", "set modified message response self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo) # this should", "Content-Type header. 
Overwrite adds the header if it # does", "\"--------\" return # support for burp-exceptions try: FixBurpExceptions() except: pass", "only have responses # get the request associated with the", "# https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage: Put both files", "burp import IHttpListener class BurpExtender(IBurpExtender, IHttpListener): # implement IBurpExtender #", "# obtain an extension helpers object self.utils = BurpUtils(callbacks.getHelpers()) #", "header. Overwrite adds the header if it # does not", "# modified \"example traffic redirector\" # https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt", "both files in a directory and add filter-options.py to Burp.", "extension helpers object self.utils = BurpUtils(callbacks.getHelpers()) # support for burp-exceptions", "# set our extension name callbacks.setExtensionName(\"Filter OPTIONS\") # register an", "add filter-options.py to Burp. Nees Jython. 
# Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/", "burputils import BurpUtils except ImportError: pass from burp import IBurpExtender", "= headers.exportRaw() # build message modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) #", "modified message response self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo) # this should be", "only process responses if messageIsRequest: return # now we only", "message response self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo) # this should be reflected", "should be reflected in response tab # done print \"--------\"", "messageInfo) # get headers using utils headers = self.utils.getHeaders(responseInfo) #", "was not OPTIONS if requestInfo.getMethod() != \"OPTIONS\": return # get", "have responses # get the request associated with the response", "Burp. Nees Jython. # Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support for", "if requestInfo.getMethod() != \"OPTIONS\": return # get response info responseInfo", "https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage: Put both files in a directory and", "- https://github.com/parsiya/burputils try: from burputils import BurpUtils except ImportError: pass", "BurpUtils except ImportError: pass from burp import IBurpExtender from burp", "name callbacks.setExtensionName(\"Filter OPTIONS\") # register an HTTP listener callbacks.registerHttpListener(self) #", "IBurpExtender # set everything up def registerExtenderCallbacks(self, callbacks): # obtain", "https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage: Put both files in", "# set modified message response 
self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo) # this", "self.utils.getBody(messageIsRequest, messageInfo) # Debug # rawHeaders = headers.exportRaw() # build", "# implement IBurpExtender # set everything up def registerExtenderCallbacks(self, callbacks):", "the request method was not OPTIONS if requestInfo.getMethod() != \"OPTIONS\":", "sys.stdout = callbacks.getStdout() except: pass # set our extension name", "self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) # set modified message response self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo)", "# this should be reflected in response tab # done", "except: pass # set our extension name callbacks.setExtensionName(\"Filter OPTIONS\") #", "headers using utils headers = self.utils.getHeaders(responseInfo) # overwrite the Content-Type", "traffic redirector\" # https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage: Put", "return if the request method was not OPTIONS if requestInfo.getMethod()", "OPTIONS\") # register an HTTP listener callbacks.registerHttpListener(self) # # implement", "self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo) # this should be reflected in response", "burputils - https://github.com/parsiya/burputils try: from burputils import BurpUtils except ImportError:", "implement IBurpExtender # set everything up def registerExtenderCallbacks(self, callbacks): #", "messageIsRequest, messageInfo): # only process responses if messageIsRequest: return #", "get response info responseInfo = self.utils.getInfo(False, messageInfo) # get headers", "response self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo) # this should be reflected in", "set our extension name callbacks.setExtensionName(\"Filter OPTIONS\") # register an HTTP", "IBurpExtender from burp 
import IHttpListener class BurpExtender(IBurpExtender, IHttpListener): # implement", "messageInfo) # Debug # rawHeaders = headers.exportRaw() # build message", "an extension helpers object self.utils = BurpUtils(callbacks.getHelpers()) # support for", "# get the request associated with the response requestInfo =", "try: from exceptions_fix import FixBurpExceptions import sys except ImportError: pass", "# implement IHttpListener # def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo): #", "modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) # set modified message response self.utils.setRequestResponse(messageIsRequest,", "set everything up def registerExtenderCallbacks(self, callbacks): # obtain an extension", "this should be reflected in response tab # done print", "the response requestInfo = self.utils.getInfo(True, messageInfo) # return if the", "request associated with the response requestInfo = self.utils.getInfo(True, messageInfo) #", "IHttpListener): # implement IBurpExtender # set everything up def registerExtenderCallbacks(self,", "bodyBytes = self.utils.getBody(messageIsRequest, messageInfo) # Debug # rawHeaders = headers.exportRaw()", "associated with the response requestInfo = self.utils.getInfo(True, messageInfo) # return", "= self.utils.getInfo(False, messageInfo) # get headers using utils headers =", "bodyBytes) # set modified message response self.utils.setRequestResponse(messageIsRequest, modifiedmsg, messageInfo) #", "# # implement IHttpListener # def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo):", "if it # does not exist. headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\") #", "burp import IBurpExtender from burp import IHttpListener class BurpExtender(IBurpExtender, IHttpListener):", "Jython. 
# Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support for burp-exceptions -", "registerExtenderCallbacks(self, callbacks): # obtain an extension helpers object self.utils =", "# register an HTTP listener callbacks.registerHttpListener(self) # # implement IHttpListener", "self.utils.getHeaders(responseInfo) # overwrite the Content-Type header. Overwrite adds the header", "= self.utils.getHeaders(responseInfo) # overwrite the Content-Type header. Overwrite adds the", "BurpUtils(callbacks.getHelpers()) # support for burp-exceptions try: sys.stdout = callbacks.getStdout() except:", "messageInfo): # only process responses if messageIsRequest: return # now", "= BurpUtils(callbacks.getHelpers()) # support for burp-exceptions try: sys.stdout = callbacks.getStdout()", "if messageIsRequest: return # now we only have responses #", "responseInfo = self.utils.getInfo(False, messageInfo) # get headers using utils headers", "# done print \"--------\" return # support for burp-exceptions try:", "try: from burputils import BurpUtils except ImportError: pass from burp", "build message modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) # set modified message", "# set everything up def registerExtenderCallbacks(self, callbacks): # obtain an", "modified \"example traffic redirector\" # https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt #", "see https://github.com/securityMB/burp-exceptions try: from exceptions_fix import FixBurpExceptions import sys except", "for burputils - https://github.com/parsiya/burputils try: from burputils import BurpUtils except", "# overwrite the Content-Type header. 
Overwrite adds the header if", "# Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support for burp-exceptions - see", "# get response info responseInfo = self.utils.getInfo(False, messageInfo) # get", "# Usage: Put both files in a directory and add", "if the request method was not OPTIONS if requestInfo.getMethod() !=", "return # now we only have responses # get the", "helpers object self.utils = BurpUtils(callbacks.getHelpers()) # support for burp-exceptions try:", "# support for burp-exceptions - see https://github.com/securityMB/burp-exceptions try: from exceptions_fix", "except ImportError: pass from burp import IBurpExtender from burp import", "requestInfo = self.utils.getInfo(True, messageInfo) # return if the request method", "not exist. headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\") # put everything back together", "response tab # done print \"--------\" return # support for", "# support for burputils - https://github.com/parsiya/burputils try: from burputils import", "import BurpUtils except ImportError: pass from burp import IBurpExtender from", "header if it # does not exist. 
headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\")", "object self.utils = BurpUtils(callbacks.getHelpers()) # support for burp-exceptions try: sys.stdout", "https://github.com/securityMB/burp-exceptions try: from exceptions_fix import FixBurpExceptions import sys except ImportError:", "# def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo): # only process responses", "charset=UTF-8\") # put everything back together bodyBytes = self.utils.getBody(messageIsRequest, messageInfo)", "requestInfo.getMethod() != \"OPTIONS\": return # get response info responseInfo =", "get the request associated with the response requestInfo = self.utils.getInfo(True,", "\"OPTIONS\": return # get response info responseInfo = self.utils.getInfo(False, messageInfo)", "\"text/css; charset=UTF-8\") # put everything back together bodyBytes = self.utils.getBody(messageIsRequest,", "def registerExtenderCallbacks(self, callbacks): # obtain an extension helpers object self.utils", "try: sys.stdout = callbacks.getStdout() except: pass # set our extension", "FixBurpExceptions import sys except ImportError: pass # support for burputils", "https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support for burp-exceptions - see https://github.com/securityMB/burp-exceptions try: from", "sys except ImportError: pass # support for burputils - https://github.com/parsiya/burputils", "from burp import IBurpExtender from burp import IHttpListener class BurpExtender(IBurpExtender,", "!= \"OPTIONS\": return # get response info responseInfo = self.utils.getInfo(False,", "# rawHeaders = headers.exportRaw() # build message modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(),", "from burp import IHttpListener class BurpExtender(IBurpExtender, IHttpListener): # implement IBurpExtender", "import IBurpExtender from burp import IHttpListener class BurpExtender(IBurpExtender, IHttpListener): #", "response 
requestInfo = self.utils.getInfo(True, messageInfo) # return if the request", "# Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage: Put both files in a", "reflected in response tab # done print \"--------\" return #", "tab # done print \"--------\" return # support for burp-exceptions", "our extension name callbacks.setExtensionName(\"Filter OPTIONS\") # register an HTTP listener", "except ImportError: pass # support for burputils - https://github.com/parsiya/burputils try:", "implement IHttpListener # def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo): # only", "pass # support for burputils - https://github.com/parsiya/burputils try: from burputils", "= self.utils.getBody(messageIsRequest, messageInfo) # Debug # rawHeaders = headers.exportRaw() #", "the header if it # does not exist. headers.overwrite(\"Content-Type\", \"text/css;", "# does not exist. headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\") # put everything", "everything up def registerExtenderCallbacks(self, callbacks): # obtain an extension helpers", "# Debug # rawHeaders = headers.exportRaw() # build message modifiedmsg", "messageInfo) # return if the request method was not OPTIONS", "headers.exportRaw() # build message modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) # set", "callbacks): # obtain an extension helpers object self.utils = BurpUtils(callbacks.getHelpers())", "IHttpListener # def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo): # only process", "method was not OPTIONS if requestInfo.getMethod() != \"OPTIONS\": return #", "return # get response info responseInfo = self.utils.getInfo(False, messageInfo) #", "= self.utils.getInfo(True, messageInfo) # return if the request method was", "# support for burp-exceptions try: sys.stdout = callbacks.getStdout() except: pass", "obtain an extension helpers object self.utils = BurpUtils(callbacks.getHelpers()) # support", 
"extension name callbacks.setExtensionName(\"Filter OPTIONS\") # register an HTTP listener callbacks.registerHttpListener(self)", "= self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) # set modified message response self.utils.setRequestResponse(messageIsRequest, modifiedmsg,", "in response tab # done print \"--------\" return # support", "Put both files in a directory and add filter-options.py to", "done print \"--------\" return # support for burp-exceptions try: FixBurpExceptions()", "BurpExtender(IBurpExtender, IHttpListener): # implement IBurpExtender # set everything up def", "get headers using utils headers = self.utils.getHeaders(responseInfo) # overwrite the", "import IHttpListener class BurpExtender(IBurpExtender, IHttpListener): # implement IBurpExtender # set", "pass from burp import IBurpExtender from burp import IHttpListener class", "from burputils import BurpUtils except ImportError: pass from burp import", "messageInfo) # this should be reflected in response tab #", "support for burp-exceptions try: sys.stdout = callbacks.getStdout() except: pass #", "Usage: Put both files in a directory and add filter-options.py", "# put everything back together bodyBytes = self.utils.getBody(messageIsRequest, messageInfo) #", "utils headers = self.utils.getHeaders(responseInfo) # overwrite the Content-Type header. Overwrite", "process responses if messageIsRequest: return # now we only have", "it # does not exist. 
headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\") # put", "support for burp-exceptions - see https://github.com/securityMB/burp-exceptions try: from exceptions_fix import", "self.utils = BurpUtils(callbacks.getHelpers()) # support for burp-exceptions try: sys.stdout =", "- see https://github.com/securityMB/burp-exceptions try: from exceptions_fix import FixBurpExceptions import sys", "\"example traffic redirector\" # https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage:", "headers = self.utils.getHeaders(responseInfo) # overwrite the Content-Type header. Overwrite adds", "exist. headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\") # put everything back together bodyBytes", "the request associated with the response requestInfo = self.utils.getInfo(True, messageInfo)", "Nees Jython. # Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support for burp-exceptions", "overwrite the Content-Type header. Overwrite adds the header if it", "filter-options.py to Burp. Nees Jython. 
# Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ #", "ImportError: pass from burp import IBurpExtender from burp import IHttpListener", "Idea: https://github.com/pajswigger/filter-options/blob/master/src/filter-options.kt # Usage: Put both files in a directory", "headers.overwrite(\"Content-Type\", \"text/css; charset=UTF-8\") # put everything back together bodyBytes =", "callbacks.setExtensionName(\"Filter OPTIONS\") # register an HTTP listener callbacks.registerHttpListener(self) # #", "callbacks.getStdout() except: pass # set our extension name callbacks.setExtensionName(\"Filter OPTIONS\")", "for burp-exceptions - see https://github.com/securityMB/burp-exceptions try: from exceptions_fix import FixBurpExceptions", "an HTTP listener callbacks.registerHttpListener(self) # # implement IHttpListener # def", "from exceptions_fix import FixBurpExceptions import sys except ImportError: pass #", "rawHeaders = headers.exportRaw() # build message modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes)", "toolFlag, messageIsRequest, messageInfo): # only process responses if messageIsRequest: return", "request method was not OPTIONS if requestInfo.getMethod() != \"OPTIONS\": return", "modifiedmsg, messageInfo) # this should be reflected in response tab", "together bodyBytes = self.utils.getBody(messageIsRequest, messageInfo) # Debug # rawHeaders =", "import sys except ImportError: pass # support for burputils -", "we only have responses # get the request associated with", "listener callbacks.registerHttpListener(self) # # implement IHttpListener # def processHttpMessage(self, toolFlag,", "not OPTIONS if requestInfo.getMethod() != \"OPTIONS\": return # get response", "the Content-Type header. 
Overwrite adds the header if it #", "everything back together bodyBytes = self.utils.getBody(messageIsRequest, messageInfo) # Debug #", "Debug # rawHeaders = headers.exportRaw() # build message modifiedmsg =", "message modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) # set modified message response", "be reflected in response tab # done print \"--------\" return", "IHttpListener class BurpExtender(IBurpExtender, IHttpListener): # implement IBurpExtender # set everything", "info responseInfo = self.utils.getInfo(False, messageInfo) # get headers using utils", "responses # get the request associated with the response requestInfo", "burp-exceptions - see https://github.com/securityMB/burp-exceptions try: from exceptions_fix import FixBurpExceptions import", "in a directory and add filter-options.py to Burp. Nees Jython.", "post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support for burp-exceptions - see https://github.com/securityMB/burp-exceptions try:", "to Burp. Nees Jython. 
# Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support", "Overwrite adds the header if it # does not exist.", "# get headers using utils headers = self.utils.getHeaders(responseInfo) # overwrite", "messageIsRequest: return # now we only have responses # get", "OPTIONS if requestInfo.getMethod() != \"OPTIONS\": return # get response info", "self.utils.getInfo(True, messageInfo) # return if the request method was not", "support for burputils - https://github.com/parsiya/burputils try: from burputils import BurpUtils", "# now we only have responses # get the request", "now we only have responses # get the request associated", "up def registerExtenderCallbacks(self, callbacks): # obtain an extension helpers object", "Blog post: https://parsiya.net/blog/2019-04-06-hiding-options-an-adventure-in-dealing-with-burp-proxy-in-an-extension/ # support for burp-exceptions - see https://github.com/securityMB/burp-exceptions", "ImportError: pass # support for burputils - https://github.com/parsiya/burputils try: from", "https://github.com/parsiya/burputils try: from burputils import BurpUtils except ImportError: pass from", "= callbacks.getStdout() except: pass # set our extension name callbacks.setExtensionName(\"Filter", "put everything back together bodyBytes = self.utils.getBody(messageIsRequest, messageInfo) # Debug", "<reponame>parsiya/Parsia-Code # modified \"example traffic redirector\" # https://raw.githubusercontent.com/PortSwigger/example-traffic-redirector/master/python/TrafficRedirector.py # Idea:", "exceptions_fix import FixBurpExceptions import sys except ImportError: pass # support", "print \"--------\" return # support for burp-exceptions try: FixBurpExceptions() except:", "files in a directory and add filter-options.py to Burp. Nees", "HTTP listener callbacks.registerHttpListener(self) # # implement IHttpListener # def processHttpMessage(self,", "and add filter-options.py to Burp. 
Nees Jython. # Blog post:", "response info responseInfo = self.utils.getInfo(False, messageInfo) # get headers using", "for burp-exceptions try: sys.stdout = callbacks.getStdout() except: pass # set", "with the response requestInfo = self.utils.getInfo(True, messageInfo) # return if", "# only process responses if messageIsRequest: return # now we", "self.utils.getInfo(False, messageInfo) # get headers using utils headers = self.utils.getHeaders(responseInfo)", "# build message modifiedmsg = self.utils.burpHelper.buildHttpMessage(headers.exportRaw(), bodyBytes) # set modified" ]
[ "= 'dev_appserver_login' # Indicates that the user has admin access", "hashlib import logging import os import sha import sys import", "completed. email: Email address to set for the client. admin:", "cookie, so ignoring it.\".format(email)) return '', False, '' admin_apps =", "2.0 (the \"License\"); # you may not use this file", "action.lower() == LOGIN_ACTION.lower() and set_email: self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email, set_admin) #", "has completed. email: Email address to set for the client.", "if isinstance(redirect_url, unicode): redirect_url = redirect_url.encode('ascii') # Redirect the user", "sha import sys import urllib import uuid import webapp2 app_dashboard_lib", "user's email address, if any. admin: True if the user", "type=\"submit\" id=\"submit-logout\" /> </p> </div> <input name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/> </form>", "action. self.response.status = 302 self.response.status_message = 'Redirecting to continue URL'", "'' else: vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh != vhsh: logging.info(\"{0}", "self.response.headers['Location'] = redirect_url else: # Send the user to the", "names onto values. cookie_name: The name of the cookie that", "Returns: A Set-Cookie value for clearing the user info of", "login_url # Perform the action. if action.lower() == LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie']", "self.request.get(_EMAIL_PARAM) set_admin = self.request.get(_ADMIN_PARAM).lower() == 'true' continue_url = self.request.get(CONTINUE_PARAM) login_url", "'email' _ADMIN_PARAM = 'admin' ACTION_PARAM = 'action' # Values for", "a continue parameter to return to continue_url. 
The login_url should", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "= \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else: appscale_login_url = \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT)", "= '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) + app_dashboard_lib) from app_dashboard_helper import AppDashboardHelper #", "should be admin; False otherwise. cookie_name: The name of the", "login_url: The parameter to _login_response. continue_url: The parameter to _login_response.", "Returns: An (empty) iterable over strings containing the body of", "onto values. cookie_name: The name of the cookie that stores", "request header. cookie_name: The name of the cookie that stores", "= '1' + ''.join(['%02d' % ord(x) for x in user_id_digest])[:20]", "_set_user_info_cookie(set_email, set_admin) # URLs should be ASCII-only byte strings. if", "query parameters: continue: URL to redirect to after a login", "function. Returns: An (empty) iterable over strings containing the body", "user is an admin; False otherwise. user_id: The user ID,", "cookie[cookie_name]['path'] = '/' cookie[cookie_name]['max-age'] = '0' if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] =", "urllib.quote(continue_url)) start_response('302 Requires login', [('Location', redirect_url)]) return [] def fake_admin():", "set_admin) # URLs should be ASCII-only byte strings. if isinstance(redirect_url,", "def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME): \"\"\"Creates a cookie to set the", "<h3>%(login_message)s</h3> <p style=\"padding: 0; margin: 0\"> <label for=\"email\" style=\"width: 3em\">Email:</label>", "dict((k, v.value) for k, v in cookie.iteritems()) return _get_user_info_from_dict(cookie_dict, cookie_name)", "user info of the requestor. 
\"\"\" cookie = Cookie.SimpleCookie() cookie[cookie_name]", "= Cookie.SimpleCookie() cookie[cookie_name] = cookie_value cookie[cookie_name]['path'] = '/' return cookie[cookie_name].OutputString()", "address. admin: True if the user is an admin; False", "Cookie.CookieError: return '', False, '' cookie_dict = dict((k, v.value) for", "import os import sha import sys import urllib import uuid", "</p> </div> <input name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/> </form> </body> </html> \"\"\"", "start_response): \"\"\"Writes a login redirection URL to a user. This", "otherwise. Returns: A string containing the cookie payload. \"\"\" if", "_COOKIE_NAME = 'dev_appserver_login' # Indicates that the user has admin", "= '0' if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] = AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString()", "data. Args: email: The user's email address. admin: True if", "the user info. _COOKIE_NAME = 'dev_appserver_login' # Indicates that the", "parameters. \"\"\" import cgi import Cookie import hashlib import logging", "it.\".format(email)) return '', False, '' admin_apps = admin.split(',') current_app =", "cookie from the requestor, logging them out. Args: cookie_name: The", "sans-serif\"> <div style=\"width: 20em; margin: 1em auto; text-align:left; padding: 0", "use this file except in compliance with the License. #", "style=\"width: 3em\">Email:</label> <input name=\"email\" type=\"email\" value=\"%(email)s\" id=\"email\"/> </p> <p style=\"margin:", "requestor's user info from a cookie dictionary. Args: cookie_dict: A", "= cookie_value.replace(\"%40\",'@') cookie_value = cookie_value.replace(\"%2C\",\",\") email, nickname, admin, hsh =", "an admin; False otherwise. 
user_id: The user ID, if any.", "AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url) ) else:", "any. admin: True if the user is currently an admin;", "The email address of the current user, if any. admin:", "for logging in and out, supply no parameters. \"\"\" import", "= 'Logged in' else: login_message = 'Not logged in' email", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "def get(self): action = self.request.get(ACTION_PARAM) set_email = self.request.get(_EMAIL_PARAM) set_admin =", "<label for=\"email\" style=\"width: 3em\">Email:</label> <input name=\"email\" type=\"email\" value=\"%(email)s\" id=\"email\"/> </p>", "os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler): \"\"\"The request handler for", "License. # You may obtain a copy of the License", "If 'True', the client should be logged in as an", "body of the HTTP response. \"\"\" if AppDashboardHelper.USE_SHIBBOLETH: redirect_url =", "admin access to all applications. CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN' # The", "'logout' LOGIN_ACTION = 'login' # Name of the cookie that", "email to set for the user. admin: True if the", "is_admin, nickname def _create_cookie_data(email, admin): \"\"\"Creates cookie payload data. Args:", "'') cookie_value = cookie_value.replace(\"%3A\",\":\") cookie_value = cookie_value.replace(\"%40\",'@') cookie_value = cookie_value.replace(\"%2C\",\",\")", "the user is an admin; False otherwise. 
Returns: A string", "under the License is distributed on an \"AS IS\" BASIS,", "if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url) )", "redirect_url else: # Send the user to the AppDashboard to", "that the AppDashboard serves HTTPS traffic on. DASHBOARD_HTTPS_PORT = \"1443\"", "License for the specific language governing permissions and # limitations", "= '_ah/login' # CGI parameter constants. CONTINUE_PARAM = 'continue' _EMAIL_PARAM", "clearing the user info of the requestor. \"\"\" cookie =", "traffic on. DASHBOARD_HTTPS_PORT = \"1443\" def get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets the", "of the dev appserver domain (e.g., 'http://localhost:8080'). continue_url: The URL", "an admin. action: What action to take ('Login' or 'Logout').", "email: user_id_digest = hashlib.md5(email.lower()).digest() user_id = '1' + ''.join(['%02d' %", "action. if action.lower() == LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] = _clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH:", "in admin_apps return email, is_admin, nickname def _create_cookie_data(email, admin): \"\"\"Creates", "return '', False, '' cookie_dict = dict((k, v.value) for k,", "= sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh != vhsh: logging.info(\"{0} has an invalid", "admin_apps return email, is_admin, nickname def _create_cookie_data(email, admin): \"\"\"Creates cookie", "serves the login page and handles login and logout HTTP", "cookie that stores the user info. Returns: A tuple (email,", "to return to continue_url. The login_url should be on the", "regardless of the host:port the user connected to. 
Args: application_url:", "login', [('Location', redirect_url)]) return [] def fake_admin(): \"\"\" Generate the", "= 'email' _ADMIN_PARAM = 'admin' ACTION_PARAM = 'action' # Values", "get(self): action = self.request.get(ACTION_PARAM) set_email = self.request.get(_EMAIL_PARAM) set_admin = self.request.get(_ADMIN_PARAM).lower()", "the AppDashboard serves HTTPS traffic on. DASHBOARD_HTTPS_PORT = \"1443\" def", "style=\"padding: 0; margin: 0\"> <label for=\"email\" style=\"width: 3em\">Email:</label> <input name=\"email\"", "cookie_value = cookie_dict.get(cookie_name, '') cookie_value = cookie_value.replace(\"%3A\",\":\") cookie_value = cookie_value.replace(\"%40\",'@')", "_render_login_template(login_url, continue_url, email, admin): \"\"\"Renders the login page. Args: login_url:", "an invalid cookie, so ignoring it.\".format(email)) return '', False, ''", "else: appscale_login_url = \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM,", "URL to a user. This redirects to login_url with a", "if any. \"\"\" try: cookie = Cookie.SimpleCookie(http_cookie) except Cookie.CookieError: return", "= hashlib.md5(email.lower()).digest() user_id = '1' + ''.join(['%02d' % ord(x) for", "email = 'test\\x40example.com' admin_checked = 'checked' if admin else ''", "cookie[cookie_name] = cookie_value cookie[cookie_name]['path'] = '/' return cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME):", "governing permissions and # limitations under the License. 
# \"\"\"Handles", "'login' # Name of the cookie that stores the user", "\"\"\" import cgi import Cookie import hashlib import logging import", "login secret \"\"\" return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class", "os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url) self.response.status = 302", "a cookie to set the user information for the requestor.", "AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else: appscale_login_url = \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url =", "client should be logged in as an admin. action: What", "for the user. admin: True if the user should be", "redirect_url = redirect_url.encode('ascii') # Redirect the user after performing the", "def fake_admin(): \"\"\" Generate the fake admin login secret Returns:", "False otherwise. Returns: A string containing the contents of the", "in compliance with the License. # You may obtain a", "'checked' if admin else '' template_dict = { 'email': cgi.escape(email,", "software # distributed under the License is distributed on an", "constants. CONTINUE_PARAM = 'continue' _EMAIL_PARAM = 'email' _ADMIN_PARAM = 'admin'", "in before letting them view the # specified URL. if", "value of the 'Cookie' HTTP request header. cookie_name: The name", "'Logout'). 
To view the current user information and a form", "A tuple (email, admin, user_id) where: email: The user's email", "import webapp2 app_dashboard_lib = '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) + app_dashboard_lib) from app_dashboard_helper", "the user info cookie from the requestor, logging them out.", "'login_url': cgi.escape(login_url, quote=True), 'continue_url': cgi.escape(continue_url, quote=True), } return _LOGIN_TEMPLATE %", "It accepts these GET query parameters: continue: URL to redirect", "hsh = (cookie_value.split(':') + ['', '', '', ''])[:4] if email", "fake login secret \"\"\" return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest()", "#67a7e3\"> <h3>%(login_message)s</h3> <p style=\"padding: 0; margin: 0\"> <label for=\"email\" style=\"width:", "the user is an admin; False otherwise. user_id: The user", "the user has admin access to all applications. CLOUD_ADMIN_MARKER =", "requestor's user info from an HTTP Cookie header. Args: http_cookie:", "(cookie_value.split(':') + ['', '', '', ''])[:4] if email == '':", "an admin; False otherwise. Returns: A string containing the cookie", "value for clearing the user info of the requestor. \"\"\"", "is currently an admin; False otherwise. Returns: A string containing", "cookie_dict: A dictionary mapping cookie names onto values. cookie_name: The", "vhsh: logging.info(\"{0} has an invalid cookie, so ignoring it.\".format(email)) return", "</p> <p style=\"margin-left: 3em\"> <input name=\"action\" value=\"Login\" type=\"submit\" id=\"submit-login\" />", "AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] = AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE = \"\"\"<html>", "to redirect to after a login or logout has completed.", "http_cookie: The value of the 'Cookie' HTTP request header. 
cookie_name:", "containing the body of the HTTP response. \"\"\" if AppDashboardHelper.USE_SHIBBOLETH:", "<input name=\"action\" value=\"Logout\" type=\"submit\" id=\"submit-logout\" /> </p> </div> <input name=\"continue\"", "connected to. Args: application_url: The URL of the dev appserver", "admin): \"\"\"Renders the login page. Args: login_url: The parameter to", "for the requestor. Args: email: The email to set for", "redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url) self.response.status = 302 self.response.status_message =", "within the dev appserver. LOGIN_URL_RELATIVE = '_ah/login' # CGI parameter", "a cookie dictionary. Args: cookie_dict: A dictionary mapping cookie names", "set_email: self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email, set_admin) # URLs should be ASCII-only", "= '/' return cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the user info", "a login redirection URL to a user. This redirects to", "redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower() == LOGIN_ACTION.lower() and set_email: self.response.headers['Set-Cookie']", "2em; background-color: #d6e9f8; border: 2px solid #67a7e3\"> <h3>%(login_message)s</h3> <p style=\"padding:", "!= vhsh: logging.info(\"{0} has an invalid cookie, so ignoring it.\".format(email))", "the action. if action.lower() == LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] = _clear_user_info_cookie() if", "A Set-Cookie value for clearing the user info of the", "header. 
cookie_name: The name of the cookie that stores the", "os.environ['APPLICATION_ID'] is_admin = current_app in admin_apps or CLOUD_ADMIN_MARKER in admin_apps", "self.request.path_url if action: redirect_url = continue_url or login_url # Perform", "cookie_dict.get(cookie_name, '') cookie_value = cookie_value.replace(\"%3A\",\":\") cookie_value = cookie_value.replace(\"%40\",'@') cookie_value =", "3em\">Email:</label> <input name=\"email\" type=\"email\" value=\"%(email)s\" id=\"email\"/> </p> <p style=\"margin: .5em", "stores the user info. _COOKIE_NAME = 'dev_appserver_login' # Indicates that", "<p style=\"margin: .5em 0 0 3em; font-size:12px\"> <input name=\"admin\" type=\"checkbox\"", "set_admin = self.request.get(_ADMIN_PARAM).lower() == 'true' continue_url = self.request.get(CONTINUE_PARAM) login_url =", "13px sans-serif\"> <div style=\"width: 20em; margin: 1em auto; text-align:left; padding:", "1.25em 2em; background-color: #d6e9f8; border: 2px solid #67a7e3\"> <h3>%(login_message)s</h3> <p", "= 'Redirecting to continue URL' self.response.headers['Location'] = redirect_url else: #", "uuid import webapp2 app_dashboard_lib = '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) + app_dashboard_lib) from", "= '' admin = '' return '', False, '' else:", "padding: 0 2em 1.25em 2em; background-color: #d6e9f8; border: 2px solid", "CGI parameter constants. CONTINUE_PARAM = 'continue' _EMAIL_PARAM = 'email' _ADMIN_PARAM", "style=\"margin-left: 3em\"> <input name=\"action\" value=\"Login\" type=\"submit\" id=\"submit-login\" /> <input name=\"action\"", "payload. \"\"\" if email: user_id_digest = hashlib.md5(email.lower()).digest() user_id = '1'", "admin; False otherwise. cookie_name: The name of the cookie that", "the canonical front-end server, regardless of the host:port the user", "the cookie that stores the user info. _COOKIE_NAME = 'dev_appserver_login'", "them out. 
Args: cookie_name: The name of the cookie that", "in user_id_digest])[:20] else: user_id = '' return '%s:%s:%s' % (email,", "of the host:port the user connected to. Args: application_url: The", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "'', False, '' cookie_dict = dict((k, v.value) for k, v", "admin, user_id) def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME): \"\"\"Creates a cookie to", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "dealing with user cookies. Includes a WSGI application that serves", "that stores the user info. Returns: A Set-Cookie value for", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "parameters: continue: URL to redirect to after a login or", "server, regardless of the host:port the user connected to. Args:", "to in writing, software # distributed under the License is", "[('Location', redirect_url)]) return [] def fake_admin(): \"\"\" Generate the fake", "\"\"\"Renders the login page. Args: login_url: The parameter to _login_response.", "id=\"submit-logout\" /> </p> </div> <input name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/> </form> </body>", "be admin; False otherwise. cookie_name: The name of the cookie", "# See the License for the specific language governing permissions", "font: 13px sans-serif\"> <div style=\"width: 20em; margin: 1em auto; text-align:left;", "else: vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh != vhsh: logging.info(\"{0} has", "application that serves the login page and handles login and", "Name of the cookie that stores the user info. 
_COOKIE_NAME", "def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from a", "and # limitations under the License. # \"\"\"Handles login/logout pages", "for the client. admin: If 'True', the client should be", "to _login_response. continue_url: The parameter to _login_response. email: The email", "user ID, if any. \"\"\" try: cookie = Cookie.SimpleCookie(http_cookie) except", "the user info. Returns: A Set-Cookie value for clearing the", "This redirects to login_url with a continue parameter to return", "language governing permissions and # limitations under the License. #", "login_url should be on the canonical front-end server, regardless of", "redirect to after a login or logout has completed. email:", "or agreed to in writing, software # distributed under the", "any. \"\"\" try: cookie = Cookie.SimpleCookie(http_cookie) except Cookie.CookieError: return '',", "redirection URL to a user. This redirects to login_url with", "= '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url) ) else: hostname =", "be ASCII-only byte strings. if isinstance(redirect_url, unicode): redirect_url = redirect_url.encode('ascii')", "# Redirect the user after performing the action. self.response.status =", "required by applicable law or agreed to in writing, software", "requestor. Args: email: The email to set for the user.", "self.response.status = 302 self.response.status_message = 'Redirecting to login service URL'", "To view the current user information and a form for", "the cookie that stores the user info. Returns: A Set-Cookie", "cookie names onto values. 
cookie_name: The name of the cookie", ") else: hostname = os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "value=\"True\" %(admin_checked)s id=\"admin\"/> <label for=\"admin\">Sign in as Administrator</label> </p> <p", "\"\"\" return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler): \"\"\"The", "'{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url) self.response.status = 302 self.response.status_message = 'Redirecting to", "with the License. # You may obtain a copy of", "+ app_dashboard_lib) from app_dashboard_helper import AppDashboardHelper # URL of the", "= '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url) self.response.status = 302 self.response.status_message = 'Redirecting", "'_ah/login' # CGI parameter constants. CONTINUE_PARAM = 'continue' _EMAIL_PARAM =", "user to the AppDashboard to log in before letting them", "if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url = \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else: appscale_login_url =", "redirect_url = continue_url or login_url # Perform the action. if", "ACTION_PARAM = 'action' # Values for the action parameter. LOGOUT_ACTION", "form for logging in and out, supply no parameters. \"\"\"", "values. cookie_name: The name of the cookie that stores the", "cookie_value = cookie_value.replace(\"%2C\",\",\") email, nickname, admin, hsh = (cookie_value.split(':') +", "payload data. Args: email: The user's email address. admin: True", "cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the user info cookie from the", "user has admin access to all applications. CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN'", "or logout has completed. 
email: Email address to set for", "Indicates that the user has admin access to all applications.", "email address of the current user, if any. admin: True", "return _LOGIN_TEMPLATE % template_dict def login_redirect(application_url, continue_url, start_response): \"\"\"Writes a", "to continue_url. The login_url should be on the canonical front-end", "'continue_url': cgi.escape(continue_url, quote=True), } return _LOGIN_TEMPLATE % template_dict def login_redirect(application_url,", "cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from an HTTP Cookie", "login_redirect(application_url, continue_url, start_response): \"\"\"Writes a login redirection URL to a", "for the action parameter. LOGOUT_ACTION = 'logout' LOGIN_ACTION = 'login'", "login redirection URL to a user. This redirects to login_url", "continue_url, start_response): \"\"\"Writes a login redirection URL to a user.", "start_response('302 Requires login', [('Location', redirect_url)]) return [] def fake_admin(): \"\"\"", "start_response: A WSGI start_response function. Returns: An (empty) iterable over", "= self.request.get(_ADMIN_PARAM).lower() == 'true' continue_url = self.request.get(CONTINUE_PARAM) login_url = self.request.path_url", "\"\"\" try: cookie = Cookie.SimpleCookie(http_cookie) except Cookie.CookieError: return '', False,", "return to continue_url. The login_url should be on the canonical", "user info from a cookie dictionary. Args: cookie_dict: A dictionary", "os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires", "start_response function. Returns: An (empty) iterable over strings containing the", "compliance with the License. # You may obtain a copy", "True if the user is currently an admin; False otherwise.", "agreed to in writing, software # distributed under the License", "address, if any. 
admin: True if the user is an", "A string containing the fake login secret \"\"\" return hashlib.sha1('{}/{}'.format(", "set the user information for the requestor. Args: email: The", "'' return '%s:%s:%s' % (email, admin, user_id) def _set_user_info_cookie(email, admin,", "of the requestor. \"\"\" cookie = Cookie.SimpleCookie() cookie[cookie_name] = ''", "WSGI start_response function. Returns: An (empty) iterable over strings containing", "requests. It accepts these GET query parameters: continue: URL to", "['', '', '', ''])[:4] if email == '': nickname =", "distributed under the License is distributed on an \"AS IS\"", "a WSGI application that serves the login page and handles", "the action parameter. LOGOUT_ACTION = 'logout' LOGIN_ACTION = 'login' #", "= cookie_value.replace(\"%2C\",\",\") email, nickname, admin, hsh = (cookie_value.split(':') + ['',", "= os.environ['APPLICATION_ID'] is_admin = current_app in admin_apps or CLOUD_ADMIN_MARKER in", "style=\"width: 20em; margin: 1em auto; text-align:left; padding: 0 2em 1.25em", "Values for the action parameter. LOGOUT_ACTION = 'logout' LOGIN_ACTION =", "of the cookie that stores the user info. _COOKIE_NAME =", "letting them view the # specified URL. if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url", "information and a form for logging in and out, supply", "be on the canonical front-end server, regardless of the host:port", "before letting them view the # specified URL. if AppDashboardHelper.USE_SHIBBOLETH:", "them view the # specified URL. if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url =", "a form for logging in and out, supply no parameters.", "user information for the requestor. Args: email: The email to", "'' admin = '' return '', False, '' else: vhsh", "user. 
This redirects to login_url with a continue parameter to", "\"\"\" cookie_value = _create_cookie_data(email, admin) cookie = Cookie.SimpleCookie() cookie[cookie_name] =", "Returns: A tuple (email, admin, user_id) where: email: The user's", "express or implied. # See the License for the specific", "= 'continue' _EMAIL_PARAM = 'email' _ADMIN_PARAM = 'admin' ACTION_PARAM =", "LOGIN_ACTION = 'login' # Name of the cookie that stores", "type=\"submit\" id=\"submit-login\" /> <input name=\"action\" value=\"Logout\" type=\"submit\" id=\"submit-logout\" /> </p>", "except in compliance with the License. # You may obtain", "redirect_url)]) return [] def fake_admin(): \"\"\" Generate the fake admin", "'' cookie[cookie_name]['path'] = '/' cookie[cookie_name]['max-age'] = '0' if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain']", "the # specified URL. if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url = \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR,", "'', False, '' admin_apps = admin.split(',') current_app = os.environ['APPLICATION_ID'] is_admin", "= cookie_value cookie[cookie_name]['path'] = '/' return cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "parameter to _login_response. email: The email address of the current", "not use this file except in compliance with the License.", "parameter constants. CONTINUE_PARAM = 'continue' _EMAIL_PARAM = 'email' _ADMIN_PARAM =", "# Copyright 2007 Google Inc. # # Licensed under the", "background-color: #d6e9f8; border: 2px solid #67a7e3\"> <h3>%(login_message)s</h3> <p style=\"padding: 0;", "What action to take ('Login' or 'Logout'). To view the", "admin: True if the user should be admin; False otherwise.", "AppDashboard to log in before letting them view the #", "Copyright 2007 Google Inc. 
# # Licensed under the Apache", "0 2em 1.25em 2em; background-color: #d6e9f8; border: 2px solid #67a7e3\">", "= 'CLOUD_ADMIN' # The port that the AppDashboard serves HTTPS", "app_dashboard_helper import AppDashboardHelper # URL of the login page within", "the dev appserver. LOGIN_URL_RELATIVE = '_ah/login' # CGI parameter constants.", "writing, software # distributed under the License is distributed on", "is_admin = current_app in admin_apps or CLOUD_ADMIN_MARKER in admin_apps return", "and handles login and logout HTTP requests. It accepts these", "the cookie that stores the user info. Returns: A tuple", "'/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) + app_dashboard_lib) from app_dashboard_helper import AppDashboardHelper # URL", "The login_url should be on the canonical front-end server, regardless", "ignoring it.\".format(email)) return '', False, '' admin_apps = admin.split(',') current_app", "you may not use this file except in compliance with", "= current_app in admin_apps or CLOUD_ADMIN_MARKER in admin_apps return email,", "the user should be admin; False otherwise. cookie_name: The name", "and logout HTTP requests. It accepts these GET query parameters:", "the requestor, logging them out. Args: cookie_name: The name of", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "domain (e.g., 'http://localhost:8080'). 
continue_url: The URL to continue to after", "'' return '', False, '' else: vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest() if", "cookie.iteritems()) return _get_user_info_from_dict(cookie_dict, cookie_name) def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's", "_get_user_info_from_dict(cookie_dict, cookie_name) def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info", "'' template_dict = { 'email': cgi.escape(email, quote=True), 'admin_checked': admin_checked, 'login_message':", "redirect_url.encode('ascii') # Redirect the user after performing the action. self.response.status", "\"\"\" if email: user_id_digest = hashlib.md5(email.lower()).digest() user_id = '1' +", "= os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302", "to _login_response. email: The email address of the current user,", "AppDashboard serves HTTPS traffic on. DASHBOARD_HTTPS_PORT = \"1443\" def get_user_info(http_cookie,", "parameter to return to continue_url. The login_url should be on", "cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from a cookie dictionary.", "cookie = Cookie.SimpleCookie() cookie[cookie_name] = cookie_value cookie[cookie_name]['path'] = '/' return", "= Cookie.SimpleCookie(http_cookie) except Cookie.CookieError: return '', False, '' cookie_dict =", "'/' cookie[cookie_name]['max-age'] = '0' if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] = AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN", "ord(x) for x in user_id_digest])[:20] else: user_id = '' return", "name=\"admin\" type=\"checkbox\" value=\"True\" %(admin_checked)s id=\"admin\"/> <label for=\"admin\">Sign in as Administrator</label>", "admin; False otherwise. Returns: A string containing the cookie payload.", "as an admin. 
action: What action to take ('Login' or", "\"\"\"Writes a login redirection URL to a user. This redirects", "dev appserver. LOGIN_URL_RELATIVE = '_ah/login' # CGI parameter constants. CONTINUE_PARAM", "CONDITIONS OF ANY KIND, either express or implied. # See", "any. admin: True if the user is an admin; False", "try: cookie = Cookie.SimpleCookie(http_cookie) except Cookie.CookieError: return '', False, ''", "def login_redirect(application_url, continue_url, start_response): \"\"\"Writes a login redirection URL to", "''.join(['%02d' % ord(x) for x in user_id_digest])[:20] else: user_id =", "WSGI application that serves the login page and handles login", "handles login and logout HTTP requests. It accepts these GET", "'{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url) ) else: hostname = os.environ['NGINX_HOST']", "self.response.headers['Set-Cookie'] = _clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower()", "the requestor. 
Args: email: The email to set for the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "the fake admin login secret Returns: A string containing the", "Handler(webapp2.RequestHandler): \"\"\"The request handler for the login and logout pages.\"\"\"", "\"\"\" cookie_secret = os.environ['COOKIE_SECRET'] cookie_value = cookie_dict.get(cookie_name, '') cookie_value =", "'Redirecting to login service URL' self.response.headers['Location'] = redirect_url application =", "= admin.split(',') current_app = os.environ['APPLICATION_ID'] is_admin = current_app in admin_apps", "<input name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/> </form> </body> </html> \"\"\" def _render_login_template(login_url,", "Requires login', [('Location', redirect_url)]) return [] def fake_admin(): \"\"\" Generate", "to continue URL' self.response.headers['Location'] = redirect_url else: # Send the", "any. \"\"\" cookie_secret = os.environ['COOKIE_SECRET'] cookie_value = cookie_dict.get(cookie_name, '') cookie_value", "'' admin_apps = admin.split(',') current_app = os.environ['APPLICATION_ID'] is_admin = current_app", "admin else '' template_dict = { 'email': cgi.escape(email, quote=True), 'admin_checked':", "style=\"text-align:center; font: 13px sans-serif\"> <div style=\"width: 20em; margin: 1em auto;", "= continue_url or login_url # Perform the action. if action.lower()", "import hashlib import logging import os import sha import sys", "user, if any. admin: True if the user is currently", "AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url) ) else: hostname = os.environ['NGINX_HOST'] redirect_url =", "self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email, set_admin) # URLs should be ASCII-only byte", "quote=True), 'admin_checked': admin_checked, 'login_message': login_message, 'login_url': cgi.escape(login_url, quote=True), 'continue_url': cgi.escape(continue_url,", "view the # specified URL. 
if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url = \"{0}:{1}/login\".format(", "'Redirecting to continue URL' self.response.headers['Location'] = redirect_url else: # Send", "cookie_secret = os.environ['COOKIE_SECRET'] cookie_value = cookie_dict.get(cookie_name, '') cookie_value = cookie_value.replace(\"%3A\",\":\")", "'/' return cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the user info cookie", "except Cookie.CookieError: return '', False, '' cookie_dict = dict((k, v.value)", "page within the dev appserver. LOGIN_URL_RELATIVE = '_ah/login' # CGI", "in' else: login_message = 'Not logged in' email = 'test\\x40example.com'", "stores the user info. Returns: A tuple (email, admin, user_id)", "LOGIN_ACTION.lower() and set_email: self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email, set_admin) # URLs should", "return _get_user_info_from_dict(cookie_dict, cookie_name) def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user", "AppDashboardHelper.USE_SHIBBOLETH: redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower() == LOGIN_ACTION.lower() and set_email:", "action to take ('Login' or 'Logout'). To view the current", "# The port that the AppDashboard serves HTTPS traffic on.", "fake admin login secret Returns: A string containing the fake", "login and logout pages.\"\"\" def get(self): action = self.request.get(ACTION_PARAM) set_email", "<input name=\"admin\" type=\"checkbox\" value=\"True\" %(admin_checked)s id=\"admin\"/> <label for=\"admin\">Sign in as", "should be ASCII-only byte strings. if isinstance(redirect_url, unicode): redirect_url =", "</html> \"\"\" def _render_login_template(login_url, continue_url, email, admin): \"\"\"Renders the login", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "continue: URL to redirect to after a login or logout", "The name of the cookie that stores the user info.", "from a cookie dictionary. Args: cookie_dict: A dictionary mapping cookie", "requestor. \"\"\" cookie = Cookie.SimpleCookie() cookie[cookie_name] = '' cookie[cookie_name]['path'] =", "type=\"email\" value=\"%(email)s\" id=\"email\"/> </p> <p style=\"margin: .5em 0 0 3em;", "of the current user, if any. admin: True if the", "cookie[cookie_name] = '' cookie[cookie_name]['path'] = '/' cookie[cookie_name]['max-age'] = '0' if", "if any. admin: True if the user is currently an", "quote=True), } return _LOGIN_TEMPLATE % template_dict def login_redirect(application_url, continue_url, start_response):", "\"\"\"The request handler for the login and logout pages.\"\"\" def", "appscale_login_url = \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else: appscale_login_url = \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'],", "the License is distributed on an \"AS IS\" BASIS, #", "Returns: Set-Cookie value for setting the user info of the", "CLOUD_ADMIN_MARKER in admin_apps return email, is_admin, nickname def _create_cookie_data(email, admin):", "= _create_cookie_data(email, admin) cookie = Cookie.SimpleCookie() cookie[cookie_name] = cookie_value cookie[cookie_name]['path']", "otherwise. user_id: The user ID, if any. \"\"\" try: cookie", "id=\"email\"/> </p> <p style=\"margin: .5em 0 0 3em; font-size:12px\"> <input", "action=\"%(login_url)s\" style=\"text-align:center; font: 13px sans-serif\"> <div style=\"width: 20em; margin: 1em", "self.response.status_message = 'Redirecting to continue URL' self.response.headers['Location'] = redirect_url else:", "the current user information and a form for logging in", "has admin access to all applications. 
CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN' #", "cookie = Cookie.SimpleCookie() cookie[cookie_name] = '' cookie[cookie_name]['path'] = '/' cookie[cookie_name]['max-age']", "else '' template_dict = { 'email': cgi.escape(email, quote=True), 'admin_checked': admin_checked,", "</div> <input name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/> </form> </body> </html> \"\"\" def", "template_dict = { 'email': cgi.escape(email, quote=True), 'admin_checked': admin_checked, 'login_message': login_message,", "== LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] = _clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL", "cgi import Cookie import hashlib import logging import os import", "= \"1443\" def get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info", "service URL' self.response.headers['Location'] = redirect_url application = webapp2.WSGIApplication([('/.*', Handler)], debug=True)", "cookie_value = _create_cookie_data(email, admin) cookie = Cookie.SimpleCookie() cookie[cookie_name] = cookie_value", "'Logged in' else: login_message = 'Not logged in' email =", "the current user, if any. admin: True if the user", "admin_apps = admin.split(',') current_app = os.environ['APPLICATION_ID'] is_admin = current_app in", "URL to redirect to after a login or logout has", "view the current user information and a form for logging", "v.value) for k, v in cookie.iteritems()) return _get_user_info_from_dict(cookie_dict, cookie_name) def", "in cookie.iteritems()) return _get_user_info_from_dict(cookie_dict, cookie_name) def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets the", "if any. \"\"\" cookie_secret = os.environ['COOKIE_SECRET'] cookie_value = cookie_dict.get(cookie_name, '')", "user is an admin; False otherwise. Returns: A string containing", "the host:port the user connected to. 
Args: application_url: The URL", "admin.split(',') current_app = os.environ['APPLICATION_ID'] is_admin = current_app in admin_apps or", "_ADMIN_PARAM = 'admin' ACTION_PARAM = 'action' # Values for the", "law or agreed to in writing, software # distributed under", "type=\"checkbox\" value=\"True\" %(admin_checked)s id=\"admin\"/> <label for=\"admin\">Sign in as Administrator</label> </p>", "email: The user's email address. admin: True if the user", "/> <input name=\"action\" value=\"Logout\" type=\"submit\" id=\"submit-logout\" /> </p> </div> <input", "if the user is an admin; False otherwise. user_id: The", "cookie_value = cookie_value.replace(\"%40\",'@') cookie_value = cookie_value.replace(\"%2C\",\",\") email, nickname, admin, hsh", "= \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url) self.response.status", "'action' # Values for the action parameter. LOGOUT_ACTION = 'logout'", "login page. \"\"\" if email: login_message = 'Logged in' else:", "logout has completed. email: Email address to set for the", "+ ''.join(['%02d' % ord(x) for x in user_id_digest])[:20] else: user_id", "that the user has admin access to all applications. CLOUD_ADMIN_MARKER", "and out, supply no parameters. \"\"\" import cgi import Cookie", "for k, v in cookie.iteritems()) return _get_user_info_from_dict(cookie_dict, cookie_name) def _get_user_info_from_dict(cookie_dict,", "Args: cookie_name: The name of the cookie that stores the", "if action.lower() == LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] = _clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH: redirect_url", "permissions and # limitations under the License. # \"\"\"Handles login/logout", "the HTTP response. \"\"\" if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR,", "is an admin; False otherwise. 
user_id: The user ID, if", "os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler): \"\"\"The request handler for the login", "Args: application_url: The URL of the dev appserver domain (e.g.,", "to after the user logs in. start_response: A WSGI start_response", "client. admin: If 'True', the client should be logged in", "invalid cookie, so ignoring it.\".format(email)) return '', False, '' admin_apps", "'test\\x40example.com' admin_checked = 'checked' if admin else '' template_dict =", "def _create_cookie_data(email, admin): \"\"\"Creates cookie payload data. Args: email: The", "Cookie.SimpleCookie() cookie[cookie_name] = cookie_value cookie[cookie_name]['path'] = '/' return cookie[cookie_name].OutputString() def", "URL of the dev appserver domain (e.g., 'http://localhost:8080'). continue_url: The", "Args: login_url: The parameter to _login_response. continue_url: The parameter to", "login_message = 'Logged in' else: login_message = 'Not logged in'", "sys import urllib import uuid import webapp2 app_dashboard_lib = '/../../../../../AppDashboard/lib'", "The user ID, if any. \"\"\" cookie_secret = os.environ['COOKIE_SECRET'] cookie_value", "<div style=\"width: 20em; margin: 1em auto; text-align:left; padding: 0 2em", "may obtain a copy of the License at # #", "or CLOUD_ADMIN_MARKER in admin_apps return email, is_admin, nickname def _create_cookie_data(email,", "= 'action' # Values for the action parameter. LOGOUT_ACTION =", "parameter. LOGOUT_ACTION = 'logout' LOGIN_ACTION = 'login' # Name of", "admin: If 'True', the client should be logged in as", "limitations under the License. # \"\"\"Handles login/logout pages and dealing", "DASHBOARD_HTTPS_PORT = \"1443\" def get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user", "#d6e9f8; border: 2px solid #67a7e3\"> <h3>%(login_message)s</h3> <p style=\"padding: 0; margin:", "string containing the cookie payload. 
\"\"\" if email: user_id_digest =", "{ 'email': cgi.escape(email, quote=True), 'admin_checked': admin_checked, 'login_message': login_message, 'login_url': cgi.escape(login_url,", "class Handler(webapp2.RequestHandler): \"\"\"The request handler for the login and logout", "Redirect the user after performing the action. self.response.status = 302", "log in before letting them view the # specified URL.", "AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url = \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else: appscale_login_url = \"https://{0}:{1}/login\".format(", "as Administrator</label> </p> <p style=\"margin-left: 3em\"> <input name=\"action\" value=\"Login\" type=\"submit\"", "the contents of the login page. \"\"\" if email: login_message", "# CGI parameter constants. CONTINUE_PARAM = 'continue' _EMAIL_PARAM = 'email'", "user info. _COOKIE_NAME = 'dev_appserver_login' # Indicates that the user", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "requestor, logging them out. Args: cookie_name: The name of the", "in' email = 'test\\x40example.com' admin_checked = 'checked' if admin else", "login secret Returns: A string containing the fake login secret", "import urllib import uuid import webapp2 app_dashboard_lib = '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__)", "set_email = self.request.get(_EMAIL_PARAM) set_admin = self.request.get(_ADMIN_PARAM).lower() == 'true' continue_url =", "_LOGIN_TEMPLATE = \"\"\"<html> <head> <title>Login</title> </head> <body> <form method=\"get\" action=\"%(login_url)s\"", "strings. if isinstance(redirect_url, unicode): redirect_url = redirect_url.encode('ascii') # Redirect the", "containing the fake login secret \"\"\" return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())),", "specified URL. 
if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url = \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else:", "login page within the dev appserver. LOGIN_URL_RELATIVE = '_ah/login' #", "an HTTP Cookie header. Args: http_cookie: The value of the", "user info. Returns: A tuple (email, admin, user_id) where: email:", "cookie that stores the user info. Returns: Set-Cookie value for", "def get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from an", "user_id) def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME): \"\"\"Creates a cookie to set", "may not use this file except in compliance with the", "user info. Returns: Set-Cookie value for setting the user info", "admin = '' return '', False, '' else: vhsh =", "if the user is an admin; False otherwise. Returns: A", "the dev appserver domain (e.g., 'http://localhost:8080'). continue_url: The URL to", "URL. if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url = \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else: appscale_login_url", "self.response.status_message = 'Redirecting to login service URL' self.response.headers['Location'] = redirect_url", "= cookie_dict.get(cookie_name, '') cookie_value = cookie_value.replace(\"%3A\",\":\") cookie_value = cookie_value.replace(\"%40\",'@') cookie_value", "\"\"\"Creates a cookie to set the user information for the", "out, supply no parameters. 
\"\"\" import cgi import Cookie import", "20em; margin: 1em auto; text-align:left; padding: 0 2em 1.25em 2em;", "else: hostname = os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM,", "fake_admin(): \"\"\" Generate the fake admin login secret Returns: A", "in admin_apps or CLOUD_ADMIN_MARKER in admin_apps return email, is_admin, nickname", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "# Send the user to the AppDashboard to log in", "so ignoring it.\".format(email)) return '', False, '' admin_apps = admin.split(',')", "the body of the HTTP response. \"\"\" if AppDashboardHelper.USE_SHIBBOLETH: redirect_url", "the action. self.response.status = 302 self.response.status_message = 'Redirecting to continue", "this file except in compliance with the License. # You", "in as an admin. action: What action to take ('Login'", "vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh != vhsh: logging.info(\"{0} has an", "admin_checked = 'checked' if admin else '' template_dict = {", "hsh != vhsh: logging.info(\"{0} has an invalid cookie, so ignoring", "if the user should be admin; False otherwise. cookie_name: The", "# URLs should be ASCII-only byte strings. if isinstance(redirect_url, unicode):", "email, admin): \"\"\"Renders the login page. Args: login_url: The parameter", "login page and handles login and logout HTTP requests. It", "to all applications. CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN' # The port that", "current_app in admin_apps or CLOUD_ADMIN_MARKER in admin_apps return email, is_admin,", "user_id_digest])[:20] else: user_id = '' return '%s:%s:%s' % (email, admin,", "the client. admin: If 'True', the client should be logged", "0 0 3em; font-size:12px\"> <input name=\"admin\" type=\"checkbox\" value=\"True\" %(admin_checked)s id=\"admin\"/>", "page. \"\"\" if email: login_message = 'Logged in' else: login_message", "Email address to set for the client. 
admin: If 'True',", "import sys import urllib import uuid import webapp2 app_dashboard_lib =", "after the user logs in. start_response: A WSGI start_response function.", "take ('Login' or 'Logout'). To view the current user information", "logging import os import sha import sys import urllib import", "'True', the client should be logged in as an admin.", "logged in' email = 'test\\x40example.com' admin_checked = 'checked' if admin", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "cookie_value.replace(\"%3A\",\":\") cookie_value = cookie_value.replace(\"%40\",'@') cookie_value = cookie_value.replace(\"%2C\",\",\") email, nickname, admin,", "user should be admin; False otherwise. cookie_name: The name of", "== 'true' continue_url = self.request.get(CONTINUE_PARAM) login_url = self.request.path_url if action:", "# # Licensed under the Apache License, Version 2.0 (the", "= os.environ['COOKIE_SECRET'] cookie_value = cookie_dict.get(cookie_name, '') cookie_value = cookie_value.replace(\"%3A\",\":\") cookie_value", "login/logout pages and dealing with user cookies. Includes a WSGI", "admin): \"\"\"Creates cookie payload data. Args: email: The user's email", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "user info from an HTTP Cookie header. Args: http_cookie: The", "% ord(x) for x in user_id_digest])[:20] else: user_id = ''", "% (email, admin, user_id) def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME): \"\"\"Creates a", "contents of the login page. 
\"\"\" if email: login_message =", "'login_message': login_message, 'login_url': cgi.escape(login_url, quote=True), 'continue_url': cgi.escape(continue_url, quote=True), } return", "\"\"\" if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url)", "LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] = _clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif", "action parameter. LOGOUT_ACTION = 'logout' LOGIN_ACTION = 'login' # Name", "LOGIN_URL_RELATIVE = '_ah/login' # CGI parameter constants. CONTINUE_PARAM = 'continue'", "'', '', ''])[:4] if email == '': nickname = ''", "DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires login', [('Location', redirect_url)]) return []", "The parameter to _login_response. email: The email address of the", "_LOGIN_TEMPLATE % template_dict def login_redirect(application_url, continue_url, start_response): \"\"\"Writes a login", "= 'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires login', [('Location',", "cookie that stores the user info. Returns: A Set-Cookie value", "request handler for the login and logout pages.\"\"\" def get(self):", "app_dashboard_lib) from app_dashboard_helper import AppDashboardHelper # URL of the login", "The user ID, if any. \"\"\" try: cookie = Cookie.SimpleCookie(http_cookie)", "HTTP Cookie header. Args: http_cookie: The value of the 'Cookie'", "the user info of the requestor. \"\"\" cookie = Cookie.SimpleCookie()", "= '' cookie[cookie_name]['path'] = '/' cookie[cookie_name]['max-age'] = '0' if AppDashboardHelper.USE_SHIBBOLETH:", "login or logout has completed. email: Email address to set", "user info of the requestor. 
\"\"\" cookie_value = _create_cookie_data(email, admin)", "if the user is currently an admin; False otherwise. Returns:", "isinstance(redirect_url, unicode): redirect_url = redirect_url.encode('ascii') # Redirect the user after", "return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler): \"\"\"The request", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the user info cookie from the requestor,", "font-size:12px\"> <input name=\"admin\" type=\"checkbox\" value=\"True\" %(admin_checked)s id=\"admin\"/> <label for=\"admin\">Sign in", "A string containing the cookie payload. \"\"\" if email: user_id_digest", "admin_checked, 'login_message': login_message, 'login_url': cgi.escape(login_url, quote=True), 'continue_url': cgi.escape(continue_url, quote=True), }", "containing the contents of the login page. \"\"\" if email:", "import logging import os import sha import sys import urllib", "the cookie that stores the user info. Returns: Set-Cookie value", "the AppDashboard to log in before letting them view the", "ID, if any. \"\"\" try: cookie = Cookie.SimpleCookie(http_cookie) except Cookie.CookieError:", "\"\"\" cookie = Cookie.SimpleCookie() cookie[cookie_name] = '' cookie[cookie_name]['path'] = '/'", "cgi.escape(continue_url, quote=True), } return _LOGIN_TEMPLATE % template_dict def login_redirect(application_url, continue_url,", "on. DASHBOARD_HTTPS_PORT = \"1443\" def get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's", "logging.info(\"{0} has an invalid cookie, so ignoring it.\".format(email)) return '',", "pages.\"\"\" def get(self): action = self.request.get(ACTION_PARAM) set_email = self.request.get(_EMAIL_PARAM) set_admin", "currently an admin; False otherwise. 
Returns: A string containing the", "of the cookie that stores the user info. Returns: A", "user_id: The user ID, if any. \"\"\" try: cookie =", "if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] = AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE =", "the user to the AppDashboard to log in before letting", "DASHBOARD_HTTPS_PORT) else: appscale_login_url = \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url = '{0}?{1}={2}'.format(appscale_login_url,", "of the requestor. \"\"\" cookie_value = _create_cookie_data(email, admin) cookie =", "the fake login secret \"\"\" return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET',", "email: login_message = 'Logged in' else: login_message = 'Not logged", "'email': cgi.escape(email, quote=True), 'admin_checked': admin_checked, 'login_message': login_message, 'login_url': cgi.escape(login_url, quote=True),", "Args: email: The email to set for the user. admin:", "Returns: A string containing the cookie payload. \"\"\" if email:", "continue_url: The URL to continue to after the user logs", "\"\"\"Creates cookie payload data. Args: email: The user's email address.", "dictionary. Args: cookie_dict: A dictionary mapping cookie names onto values.", "to take ('Login' or 'Logout'). To view the current user", "v in cookie.iteritems()) return _get_user_info_from_dict(cookie_dict, cookie_name) def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets", "admin_apps or CLOUD_ADMIN_MARKER in admin_apps return email, is_admin, nickname def", "cookie payload data. Args: email: The user's email address. 
admin:", "_get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from a cookie", "os import sha import sys import urllib import uuid import", "if email: login_message = 'Logged in' else: login_message = 'Not", "cookie_name: The name of the cookie that stores the user", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires login', [('Location', redirect_url)]) return", "login_url = self.request.path_url if action: redirect_url = continue_url or login_url", "of the 'Cookie' HTTP request header. cookie_name: The name of", "'': nickname = '' admin = '' return '', False,", "admin; False otherwise. user_id: The user ID, if any. \"\"\"", "urllib import uuid import webapp2 app_dashboard_lib = '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) +", "if admin else '' template_dict = { 'email': cgi.escape(email, quote=True),", "= 'admin' ACTION_PARAM = 'action' # Values for the action", "action.lower() == LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] = _clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH: redirect_url =", "import cgi import Cookie import hashlib import logging import os", "False, '' admin_apps = admin.split(',') current_app = os.environ['APPLICATION_ID'] is_admin =", "that serves the login page and handles login and logout", "# Values for the action parameter. LOGOUT_ACTION = 'logout' LOGIN_ACTION", "'dev_appserver_login' # Indicates that the user has admin access to", "<body> <form method=\"get\" action=\"%(login_url)s\" style=\"text-align:center; font: 13px sans-serif\"> <div style=\"width:", ".5em 0 0 3em; font-size:12px\"> <input name=\"admin\" type=\"checkbox\" value=\"True\" %(admin_checked)s", "The value of the 'Cookie' HTTP request header. cookie_name: The", "or implied. 
# See the License for the specific language", "\"\"\" def _render_login_template(login_url, continue_url, email, admin): \"\"\"Renders the login page.", "else: # Send the user to the AppDashboard to log", "user information and a form for logging in and out,", "Cookie import hashlib import logging import os import sha import", "return email, is_admin, nickname def _create_cookie_data(email, admin): \"\"\"Creates cookie payload", "'admin' ACTION_PARAM = 'action' # Values for the action parameter.", "info of the requestor. \"\"\" cookie_value = _create_cookie_data(email, admin) cookie", "these GET query parameters: continue: URL to redirect to after", "user info. Returns: A Set-Cookie value for clearing the user", "in as Administrator</label> </p> <p style=\"margin-left: 3em\"> <input name=\"action\" value=\"Login\"", "AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url) ) else: hostname = os.environ['NGINX_HOST'] redirect_url", "logging in and out, supply no parameters. \"\"\" import cgi", "pages and dealing with user cookies. Includes a WSGI application", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "AppDashboardHelper # URL of the login page within the dev", "user_id) where: email: The user's email address, if any. admin:", "True if the user should be admin; False otherwise. cookie_name:", "= AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE = \"\"\"<html> <head> <title>Login</title>", "the login page. Args: login_url: The parameter to _login_response. 
continue_url:", "unicode): redirect_url = redirect_url.encode('ascii') # Redirect the user after performing", "name=\"action\" value=\"Logout\" type=\"submit\" id=\"submit-logout\" /> </p> </div> <input name=\"continue\" type=\"hidden\"", "\"\"\"Gets the requestor's user info from a cookie dictionary. Args:", "</p> <p style=\"margin: .5em 0 0 3em; font-size:12px\"> <input name=\"admin\"", "string containing the contents of the login page. \"\"\" if", "info cookie from the requestor, logging them out. Args: cookie_name:", "a user. This redirects to login_url with a continue parameter", "'', ''])[:4] if email == '': nickname = '' admin", "return '', False, '' else: vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh", "Generate the fake admin login secret Returns: A string containing", "mapping cookie names onto values. cookie_name: The name of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "the login page. \"\"\" if email: login_message = 'Logged in'", "over strings containing the body of the HTTP response. \"\"\"", "user after performing the action. self.response.status = 302 self.response.status_message =", "CONTINUE_PARAM, continue_url) self.response.status = 302 self.response.status_message = 'Redirecting to login", "to after a login or logout has completed. email: Email", "<p style=\"padding: 0; margin: 0\"> <label for=\"email\" style=\"width: 3em\">Email:</label> <input", "The email to set for the user. admin: True if", "AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower() == LOGIN_ACTION.lower() and set_email: self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email,", "_login_response. continue_url: The parameter to _login_response. email: The email address", "Google Inc. # # Licensed under the Apache License, Version", "appserver. LOGIN_URL_RELATIVE = '_ah/login' # CGI parameter constants. 
CONTINUE_PARAM =", "method=\"get\" action=\"%(login_url)s\" style=\"text-align:center; font: 13px sans-serif\"> <div style=\"width: 20em; margin:", "HTTPS traffic on. DASHBOARD_HTTPS_PORT = \"1443\" def get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets", "parameter to _login_response. continue_url: The parameter to _login_response. email: The", "admin, hsh = (cookie_value.split(':') + ['', '', '', ''])[:4] if", "stores the user info. Returns: A Set-Cookie value for clearing", "appserver domain (e.g., 'http://localhost:8080'). continue_url: The URL to continue to", "<input name=\"action\" value=\"Login\" type=\"submit\" id=\"submit-login\" /> <input name=\"action\" value=\"Logout\" type=\"submit\"", "to login_url with a continue parameter to return to continue_url.", "the License. # \"\"\"Handles login/logout pages and dealing with user", "page and handles login and logout HTTP requests. It accepts", "_EMAIL_PARAM = 'email' _ADMIN_PARAM = 'admin' ACTION_PARAM = 'action' #", "(the \"License\"); # you may not use this file except", "if action: redirect_url = continue_url or login_url # Perform the", "= 'logout' LOGIN_ACTION = 'login' # Name of the cookie", "= '' return '%s:%s:%s' % (email, admin, user_id) def _set_user_info_cookie(email,", "of the login page within the dev appserver. LOGIN_URL_RELATIVE =", "# you may not use this file except in compliance", "the user info. Returns: Set-Cookie value for setting the user", "_clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the user info cookie from the requestor, logging", "</body> </html> \"\"\" def _render_login_template(login_url, continue_url, email, admin): \"\"\"Renders the", "True if the user is an admin; False otherwise. user_id:", "'0' if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] = AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE", "email: The user's email address, if any. 
admin: True if", "import uuid import webapp2 app_dashboard_lib = '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) + app_dashboard_lib)", "that stores the user info. Returns: Set-Cookie value for setting", "%(admin_checked)s id=\"admin\"/> <label for=\"admin\">Sign in as Administrator</label> </p> <p style=\"margin-left:", "URL to continue to after the user logs in. start_response:", "for=\"email\" style=\"width: 3em\">Email:</label> <input name=\"email\" type=\"email\" value=\"%(email)s\" id=\"email\"/> </p> <p", "is an admin; False otherwise. Returns: A string containing the", "False otherwise. Returns: A string containing the cookie payload. \"\"\"", "that stores the user info. Returns: A tuple (email, admin,", "<head> <title>Login</title> </head> <body> <form method=\"get\" action=\"%(login_url)s\" style=\"text-align:center; font: 13px", "2px solid #67a7e3\"> <h3>%(login_message)s</h3> <p style=\"padding: 0; margin: 0\"> <label", "3em; font-size:12px\"> <input name=\"admin\" type=\"checkbox\" value=\"True\" %(admin_checked)s id=\"admin\"/> <label for=\"admin\">Sign", "sys.path.append(os.path.dirname(__file__) + app_dashboard_lib) from app_dashboard_helper import AppDashboardHelper # URL of", "if email: user_id_digest = hashlib.md5(email.lower()).digest() user_id = '1' + ''.join(['%02d'", "margin: 0\"> <label for=\"email\" style=\"width: 3em\">Email:</label> <input name=\"email\" type=\"email\" value=\"%(email)s\"", "'http://localhost:8080'). continue_url: The URL to continue to after the user", "\"1443\" def get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from", "= Cookie.SimpleCookie() cookie[cookie_name] = '' cookie[cookie_name]['path'] = '/' cookie[cookie_name]['max-age'] =", "setting the user info of the requestor. 
\"\"\" cookie_value =", "the login and logout pages.\"\"\" def get(self): action = self.request.get(ACTION_PARAM)", "# # Unless required by applicable law or agreed to", "name=\"action\" value=\"Login\" type=\"submit\" id=\"submit-login\" /> <input name=\"action\" value=\"Logout\" type=\"submit\" id=\"submit-logout\"", "return '%s:%s:%s' % (email, admin, user_id) def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME):", "performing the action. self.response.status = 302 self.response.status_message = 'Redirecting to", "of the cookie that stores the user info. Returns: Set-Cookie", "(e.g., 'http://localhost:8080'). continue_url: The URL to continue to after the", "= AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower() == LOGIN_ACTION.lower() and set_email: self.response.headers['Set-Cookie'] =", "0 3em; font-size:12px\"> <input name=\"admin\" type=\"checkbox\" value=\"True\" %(admin_checked)s id=\"admin\"/> <label", "<p style=\"margin-left: 3em\"> <input name=\"action\" value=\"Login\" type=\"submit\" id=\"submit-login\" /> <input", "/> </p> </div> <input name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/> </form> </body> </html>", "after performing the action. self.response.status = 302 self.response.status_message = 'Redirecting", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "cgi.escape(login_url, quote=True), 'continue_url': cgi.escape(continue_url, quote=True), } return _LOGIN_TEMPLATE % template_dict", "_create_cookie_data(email, admin): \"\"\"Creates cookie payload data. Args: email: The user's", "self.response.status = 302 self.response.status_message = 'Redirecting to continue URL' self.response.headers['Location']", "the login page within the dev appserver. 
LOGIN_URL_RELATIVE = '_ah/login'", "'1' + ''.join(['%02d' % ord(x) for x in user_id_digest])[:20] else:", "Set-Cookie value for setting the user info of the requestor.", "x in user_id_digest])[:20] else: user_id = '' return '%s:%s:%s' %", "Version 2.0 (the \"License\"); # you may not use this", "<label for=\"admin\">Sign in as Administrator</label> </p> <p style=\"margin-left: 3em\"> <input", "= '' return '', False, '' else: vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest()", "the user after performing the action. self.response.status = 302 self.response.status_message", "if any. admin: True if the user is an admin;", "<input name=\"email\" type=\"email\" value=\"%(email)s\" id=\"email\"/> </p> <p style=\"margin: .5em 0", "cgi.escape(email, quote=True), 'admin_checked': admin_checked, 'login_message': login_message, 'login_url': cgi.escape(login_url, quote=True), 'continue_url':", "A string containing the contents of the login page. \"\"\"", "HTTP response. \"\"\" if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT,", "user cookies. Includes a WSGI application that serves the login", "= self.request.get(_EMAIL_PARAM) set_admin = self.request.get(_ADMIN_PARAM).lower() == 'true' continue_url = self.request.get(CONTINUE_PARAM)", "a login or logout has completed. email: Email address to", "LOGOUT_ACTION = 'logout' LOGIN_ACTION = 'login' # Name of the", "info. Returns: Set-Cookie value for setting the user info of", "applications. 
CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN' # The port that the AppDashboard", "cookie[cookie_name]['max-age'] = '0' if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] = AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return", "logout pages.\"\"\" def get(self): action = self.request.get(ACTION_PARAM) set_email = self.request.get(_EMAIL_PARAM)", "secret \"\"\" return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler):", "implied. # See the License for the specific language governing", "ID, if any. \"\"\" cookie_secret = os.environ['COOKIE_SECRET'] cookie_value = cookie_dict.get(cookie_name,", "= self.request.get(ACTION_PARAM) set_email = self.request.get(_EMAIL_PARAM) set_admin = self.request.get(_ADMIN_PARAM).lower() == 'true'", "login_message, 'login_url': cgi.escape(login_url, quote=True), 'continue_url': cgi.escape(continue_url, quote=True), } return _LOGIN_TEMPLATE", "under the Apache License, Version 2.0 (the \"License\"); # you", "no parameters. \"\"\" import cgi import Cookie import hashlib import", "Args: http_cookie: The value of the 'Cookie' HTTP request header.", "hashlib.md5(email.lower()).digest() user_id = '1' + ''.join(['%02d' % ord(x) for x", "self.request.get(ACTION_PARAM) set_email = self.request.get(_EMAIL_PARAM) set_admin = self.request.get(_ADMIN_PARAM).lower() == 'true' continue_url", "email address, if any. admin: True if the user is", "user ID, if any. \"\"\" cookie_secret = os.environ['COOKIE_SECRET'] cookie_value =", "= \"\"\"<html> <head> <title>Login</title> </head> <body> <form method=\"get\" action=\"%(login_url)s\" style=\"text-align:center;", "continue_url: The parameter to _login_response. email: The email address of", "admin: True if the user is currently an admin; False", "\"\"\"Handles login/logout pages and dealing with user cookies. 
Includes a", "[] def fake_admin(): \"\"\" Generate the fake admin login secret", "cookie_name=_COOKIE_NAME): \"\"\"Creates a cookie to set the user information for", "% template_dict def login_redirect(application_url, continue_url, start_response): \"\"\"Writes a login redirection", "The URL of the dev appserver domain (e.g., 'http://localhost:8080'). continue_url:", "= _set_user_info_cookie(set_email, set_admin) # URLs should be ASCII-only byte strings.", "'continue' _EMAIL_PARAM = 'email' _ADMIN_PARAM = 'admin' ACTION_PARAM = 'action'", "admin. action: What action to take ('Login' or 'Logout'). To", "email address. admin: True if the user is an admin;", "by applicable law or agreed to in writing, software #", "canonical front-end server, regardless of the host:port the user connected", "k, v in cookie.iteritems()) return _get_user_info_from_dict(cookie_dict, cookie_name) def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME):", "A WSGI start_response function. Returns: An (empty) iterable over strings", "'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires login', [('Location', redirect_url)])", "self.request.get(_ADMIN_PARAM).lower() == 'true' continue_url = self.request.get(CONTINUE_PARAM) login_url = self.request.path_url if", "continue URL' self.response.headers['Location'] = redirect_url else: # Send the user", "app_dashboard_lib = '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) + app_dashboard_lib) from app_dashboard_helper import AppDashboardHelper", "Includes a WSGI application that serves the login page and", "HTTP requests. It accepts these GET query parameters: continue: URL", "and a form for logging in and out, supply no", "Cookie.SimpleCookie(http_cookie) except Cookie.CookieError: return '', False, '' cookie_dict = dict((k,", "set for the client. 
admin: If 'True', the client should", "'' cookie_dict = dict((k, v.value) for k, v in cookie.iteritems())", "Returns: A string containing the contents of the login page.", "An (empty) iterable over strings containing the body of the", "redirect_url = '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM, urllib.quote(continue_url) ) else: hostname", "info of the requestor. \"\"\" cookie = Cookie.SimpleCookie() cookie[cookie_name] =", "continue_url) self.response.status = 302 self.response.status_message = 'Redirecting to login service", "# Indicates that the user has admin access to all", "should be on the canonical front-end server, regardless of the", "302 self.response.status_message = 'Redirecting to continue URL' self.response.headers['Location'] = redirect_url", "appscale_login_url = \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url)", "False, '' else: vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh != vhsh:", "email == '': nickname = '' admin = '' return", "stores the user info. Returns: Set-Cookie value for setting the", "cookies. Includes a WSGI application that serves the login page", "info. Returns: A Set-Cookie value for clearing the user info", "email: The email address of the current user, if any.", "cookie that stores the user info. _COOKIE_NAME = 'dev_appserver_login' #", "Args: cookie_dict: A dictionary mapping cookie names onto values. cookie_name:", "port that the AppDashboard serves HTTPS traffic on. 
DASHBOARD_HTTPS_PORT =", "and logout pages.\"\"\" def get(self): action = self.request.get(ACTION_PARAM) set_email =", "cookie[cookie_name]['path'] = '/' return cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the user", "''])[:4] if email == '': nickname = '' admin =", "sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh != vhsh: logging.info(\"{0} has an invalid cookie,", "info. Returns: A tuple (email, admin, user_id) where: email: The", "application_url: The URL of the dev appserver domain (e.g., 'http://localhost:8080').", "<reponame>loftwah/appscale #!/usr/bin/env python # # Copyright 2007 Google Inc. #", "logs in. start_response: A WSGI start_response function. Returns: An (empty)", "== LOGIN_ACTION.lower() and set_email: self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email, set_admin) # URLs", "be logged in as an admin. action: What action to", "value=\"Login\" type=\"submit\" id=\"submit-login\" /> <input name=\"action\" value=\"Logout\" type=\"submit\" id=\"submit-logout\" />", "to the AppDashboard to log in before letting them view", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "type=\"hidden\" value=\"%(continue_url)s\"/> </form> </body> </html> \"\"\" def _render_login_template(login_url, continue_url, email,", "\"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url) self.response.status =", "Unless required by applicable law or agreed to in writing,", "byte strings. if isinstance(redirect_url, unicode): redirect_url = redirect_url.encode('ascii') # Redirect", "should be logged in as an admin. action: What action", "supply no parameters. 
\"\"\" import cgi import Cookie import hashlib", "# URL of the login page within the dev appserver.", "'admin_checked': admin_checked, 'login_message': login_message, 'login_url': cgi.escape(login_url, quote=True), 'continue_url': cgi.escape(continue_url, quote=True),", "user's email address. admin: True if the user is an", "0\"> <label for=\"email\" style=\"width: 3em\">Email:</label> <input name=\"email\" type=\"email\" value=\"%(email)s\" id=\"email\"/>", "= 'test\\x40example.com' admin_checked = 'checked' if admin else '' template_dict", "0; margin: 0\"> <label for=\"email\" style=\"width: 3em\">Email:</label> <input name=\"email\" type=\"email\"", "where: email: The user's email address, if any. admin: True", "Args: email: The user's email address. admin: True if the", "accepts these GET query parameters: continue: URL to redirect to", "Cookie.SimpleCookie() cookie[cookie_name] = '' cookie[cookie_name]['path'] = '/' cookie[cookie_name]['max-age'] = '0'", "</head> <body> <form method=\"get\" action=\"%(login_url)s\" style=\"text-align:center; font: 13px sans-serif\"> <div", "the specific language governing permissions and # limitations under the", "\"\"\" if email: login_message = 'Logged in' else: login_message =", "dictionary mapping cookie names onto values. cookie_name: The name of", "the client should be logged in as an admin. action:", "login service URL' self.response.headers['Location'] = redirect_url application = webapp2.WSGIApplication([('/.*', Handler)],", "in. start_response: A WSGI start_response function. Returns: An (empty) iterable", "iterable over strings containing the body of the HTTP response.", "for the login and logout pages.\"\"\" def get(self): action =", "applicable law or agreed to in writing, software # distributed", "to continue to after the user logs in. start_response: A", "user connected to. Args: application_url: The URL of the dev", "'Cookie' HTTP request header. 
cookie_name: The name of the cookie", "3em\"> <input name=\"action\" value=\"Login\" type=\"submit\" id=\"submit-login\" /> <input name=\"action\" value=\"Logout\"", "the user. admin: True if the user should be admin;", "= 302 self.response.status_message = 'Redirecting to login service URL' self.response.headers['Location']", "or 'Logout'). To view the current user information and a", "nickname = '' admin = '' return '', False, ''", "value=\"%(continue_url)s\"/> </form> </body> </html> \"\"\" def _render_login_template(login_url, continue_url, email, admin):", "\"\"\"Gets the requestor's user info from an HTTP Cookie header.", "in writing, software # distributed under the License is distributed", "False otherwise. user_id: The user ID, if any. \"\"\" cookie_secret", "containing the cookie payload. \"\"\" if email: user_id_digest = hashlib.md5(email.lower()).digest()", "current_app = os.environ['APPLICATION_ID'] is_admin = current_app in admin_apps or CLOUD_ADMIN_MARKER", "(email, admin, user_id) where: email: The user's email address, if", "+ ['', '', '', ''])[:4] if email == '': nickname", "_login_response. email: The email address of the current user, if", "else: login_message = 'Not logged in' email = 'test\\x40example.com' admin_checked", "from app_dashboard_helper import AppDashboardHelper # URL of the login page", "import AppDashboardHelper # URL of the login page within the", "info. _COOKIE_NAME = 'dev_appserver_login' # Indicates that the user has", "response. \"\"\" if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, AppDashboardHelper.SHIBBOLETH_CONNECTOR_PORT, CONTINUE_PARAM,", "= _clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower() ==", "= self.request.path_url if action: redirect_url = continue_url or login_url #", "admin; False otherwise. 
Returns: A string containing the contents of", "= self.request.get(CONTINUE_PARAM) login_url = self.request.path_url if action: redirect_url = continue_url", "logged in as an admin. action: What action to take", "name of the cookie that stores the user info. Returns:", "name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/> </form> </body> </html> \"\"\" def _render_login_template(login_url, continue_url,", "# Name of the cookie that stores the user info.", "continue parameter to return to continue_url. The login_url should be", "strings containing the body of the HTTP response. \"\"\" if", "cookie_name) def _get_user_info_from_dict(cookie_dict, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from", "The parameter to _login_response. continue_url: The parameter to _login_response. email:", "logging them out. Args: cookie_name: The name of the cookie", "address of the current user, if any. admin: True if", "'true' continue_url = self.request.get(CONTINUE_PARAM) login_url = self.request.path_url if action: redirect_url", "to a user. This redirects to login_url with a continue", "cookie[cookie_name]['domain'] = AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE = \"\"\"<html> <head>", "Set-Cookie value for clearing the user info of the requestor.", "to set the user information for the requestor. Args: email:", "False otherwise. cookie_name: The name of the cookie that stores", "= { 'email': cgi.escape(email, quote=True), 'admin_checked': admin_checked, 'login_message': login_message, 'login_url':", "python # # Copyright 2007 Google Inc. # # Licensed", "and dealing with user cookies. Includes a WSGI application that", "set for the user. admin: True if the user should", "the requestor's user info from a cookie dictionary. 
Args: cookie_dict:", "for x in user_id_digest])[:20] else: user_id = '' return '%s:%s:%s'", "email, is_admin, nickname def _create_cookie_data(email, admin): \"\"\"Creates cookie payload data.", "id=\"submit-login\" /> <input name=\"action\" value=\"Logout\" type=\"submit\" id=\"submit-logout\" /> </p> </div>", "continue to after the user logs in. start_response: A WSGI", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "auto; text-align:left; padding: 0 2em 1.25em 2em; background-color: #d6e9f8; border:", "\"\"\" Generate the fake admin login secret Returns: A string", "cookie payload. \"\"\" if email: user_id_digest = hashlib.md5(email.lower()).digest() user_id =", "logout HTTP requests. It accepts these GET query parameters: continue:", "License, Version 2.0 (the \"License\"); # you may not use", "quote=True), 'continue_url': cgi.escape(continue_url, quote=True), } return _LOGIN_TEMPLATE % template_dict def", "# You may obtain a copy of the License at", "'', False, '' else: vhsh = sha.new(email+nickname+admin+cookie_secret).hexdigest() if hsh !=", "the requestor's user info from an HTTP Cookie header. Args:", "on the canonical front-end server, regardless of the host:port the", "continue_url, email, admin): \"\"\"Renders the login page. Args: login_url: The", "\"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT) else: appscale_login_url = \"https://{0}:{1}/login\".format( os.environ['NGINX_HOST'], DASHBOARD_HTTPS_PORT) redirect_url", "nickname, admin, hsh = (cookie_value.split(':') + ['', '', '', ''])[:4]", "= 'Not logged in' email = 'test\\x40example.com' admin_checked = 'checked'", "user_id: The user ID, if any. 
\"\"\" cookie_secret = os.environ['COOKIE_SECRET']", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires login', [('Location', redirect_url)]) return [] def", "cookie_value.replace(\"%2C\",\",\") email, nickname, admin, hsh = (cookie_value.split(':') + ['', '',", "of the HTTP response. \"\"\" if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = '{0}:{1}/login?{2}={3}'.format(", "The user's email address. admin: True if the user is", "the requestor. \"\"\" cookie = Cookie.SimpleCookie() cookie[cookie_name] = '' cookie[cookie_name]['path']", "and set_email: self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email, set_admin) # URLs should be", "<title>Login</title> </head> <body> <form method=\"get\" action=\"%(login_url)s\" style=\"text-align:center; font: 13px sans-serif\">", "otherwise. Returns: A string containing the contents of the login", "current user information and a form for logging in and", "URL of the login page within the dev appserver. LOGIN_URL_RELATIVE", "email: The email to set for the user. admin: True", "cookie_value cookie[cookie_name]['path'] = '/' return cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the", "get_user_info(http_cookie, cookie_name=_COOKIE_NAME): \"\"\"Gets the requestor's user info from an HTTP", "id=\"admin\"/> <label for=\"admin\">Sign in as Administrator</label> </p> <p style=\"margin-left: 3em\">", "self.request.get(CONTINUE_PARAM) login_url = self.request.path_url if action: redirect_url = continue_url or", "= cookie_value.replace(\"%3A\",\":\") cookie_value = cookie_value.replace(\"%40\",'@') cookie_value = cookie_value.replace(\"%2C\",\",\") email, nickname,", "of the login page. \"\"\" if email: login_message = 'Logged", "login_url with a continue parameter to return to continue_url. The", "dev appserver domain (e.g., 'http://localhost:8080'). 
continue_url: The URL to continue", "tuple (email, admin, user_id) where: email: The user's email address,", "AppDashboardHelper.\\ SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE = \"\"\"<html> <head> <title>Login</title> </head>", "to. Args: application_url: The URL of the dev appserver domain", "_clear_user_info_cookie() if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower() == LOGIN_ACTION.lower()", "the user info. Returns: A tuple (email, admin, user_id) where:", "text-align:left; padding: 0 2em 1.25em 2em; background-color: #d6e9f8; border: 2px", "def _render_login_template(login_url, continue_url, email, admin): \"\"\"Renders the login page. Args:", "information for the requestor. Args: email: The email to set", "continue_url = self.request.get(CONTINUE_PARAM) login_url = self.request.path_url if action: redirect_url =", "the License for the specific language governing permissions and #", "A dictionary mapping cookie names onto values. cookie_name: The name", "Administrator</label> </p> <p style=\"margin-left: 3em\"> <input name=\"action\" value=\"Login\" type=\"submit\" id=\"submit-login\"", "'Not logged in' email = 'test\\x40example.com' admin_checked = 'checked' if", "= dict((k, v.value) for k, v in cookie.iteritems()) return _get_user_info_from_dict(cookie_dict,", "False, '' cookie_dict = dict((k, v.value) for k, v in", "Apache License, Version 2.0 (the \"License\"); # you may not", "2em 1.25em 2em; background-color: #d6e9f8; border: 2px solid #67a7e3\"> <h3>%(login_message)s</h3>", "The URL to continue to after the user logs in.", "CONTINUE_PARAM = 'continue' _EMAIL_PARAM = 'email' _ADMIN_PARAM = 'admin' ACTION_PARAM", "address to set for the client. admin: If 'True', the", "either express or implied. # See the License for the", "cookie to set the user information for the requestor. Args:", "False otherwise. user_id: The user ID, if any. 
\"\"\" try:", "with a continue parameter to return to continue_url. The login_url", "in and out, supply no parameters. \"\"\" import cgi import", "cookie_value = cookie_value.replace(\"%3A\",\":\") cookie_value = cookie_value.replace(\"%40\",'@') cookie_value = cookie_value.replace(\"%2C\",\",\") email,", "return [] def fake_admin(): \"\"\" Generate the fake admin login", "all applications. CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN' # The port that the", "out. Args: cookie_name: The name of the cookie that stores", "login page. Args: login_url: The parameter to _login_response. continue_url: The", "ASCII-only byte strings. if isinstance(redirect_url, unicode): redirect_url = redirect_url.encode('ascii') #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "login_message = 'Not logged in' email = 'test\\x40example.com' admin_checked =", "name=\"email\" type=\"email\" value=\"%(email)s\" id=\"email\"/> </p> <p style=\"margin: .5em 0 0", "to login service URL' self.response.headers['Location'] = redirect_url application = webapp2.WSGIApplication([('/.*',", "Cookie header. Args: http_cookie: The value of the 'Cookie' HTTP", "admin login secret Returns: A string containing the fake login", "The port that the AppDashboard serves HTTPS traffic on. DASHBOARD_HTTPS_PORT", "info from a cookie dictionary. Args: cookie_dict: A dictionary mapping", "webapp2 app_dashboard_lib = '/../../../../../AppDashboard/lib' sys.path.append(os.path.dirname(__file__) + app_dashboard_lib) from app_dashboard_helper import", "HTTP request header. 
cookie_name: The name of the cookie that", "= (cookie_value.split(':') + ['', '', '', ''])[:4] if email ==", "if email == '': nickname = '' admin = ''", "redirects to login_url with a continue parameter to return to", "elif action.lower() == LOGIN_ACTION.lower() and set_email: self.response.headers['Set-Cookie'] = _set_user_info_cookie(set_email, set_admin)", "</form> </body> </html> \"\"\" def _render_login_template(login_url, continue_url, email, admin): \"\"\"Renders", "user. admin: True if the user should be admin; False", "302 self.response.status_message = 'Redirecting to login service URL' self.response.headers['Location'] =", "the user is currently an admin; False otherwise. Returns: A", "= 'Redirecting to login service URL' self.response.headers['Location'] = redirect_url application", "return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE = \"\"\"<html> <head> <title>Login</title> </head> <body> <form", "the user info of the requestor. \"\"\" cookie_value = _create_cookie_data(email,", "string containing the fake login secret \"\"\" return hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME',", "CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN' # The port that the AppDashboard serves", "# \"\"\"Handles login/logout pages and dealing with user cookies. Includes", "email: Email address to set for the client. admin: If", "return '', False, '' admin_apps = admin.split(',') current_app = os.environ['APPLICATION_ID']", "from the requestor, logging them out. Args: cookie_name: The name", "host:port the user connected to. Args: application_url: The URL of", "os.environ['COOKIE_SECRET'] cookie_value = cookie_dict.get(cookie_name, '') cookie_value = cookie_value.replace(\"%3A\",\":\") cookie_value =", "the user information for the requestor. 
Args: email: The email", "urllib.quote(continue_url) ) else: hostname = os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format( hostname,", "SHIBBOLETH_COOKIE_DOMAIN return cookie[cookie_name].OutputString() _LOGIN_TEMPLATE = \"\"\"<html> <head> <title>Login</title> </head> <body>", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "handler for the login and logout pages.\"\"\" def get(self): action", "# Perform the action. if action.lower() == LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] =", "URLs should be ASCII-only byte strings. if isinstance(redirect_url, unicode): redirect_url", "secret Returns: A string containing the fake login secret \"\"\"", "for=\"admin\">Sign in as Administrator</label> </p> <p style=\"margin-left: 3em\"> <input name=\"action\"", "cookie dictionary. Args: cookie_dict: A dictionary mapping cookie names onto", "admin: True if the user is an admin; False otherwise.", "== '': nickname = '' admin = '' return '',", "\"\"\"<html> <head> <title>Login</title> </head> <body> <form method=\"get\" action=\"%(login_url)s\" style=\"text-align:center; font:", "the 'Cookie' HTTP request header. cookie_name: The name of the", "has an invalid cookie, so ignoring it.\".format(email)) return '', False,", "with user cookies. Includes a WSGI application that serves the", "serves HTTPS traffic on. 
DASHBOARD_HTTPS_PORT = \"1443\" def get_user_info(http_cookie, cookie_name=_COOKIE_NAME):", "'%s:%s:%s' % (email, admin, user_id) def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME): \"\"\"Creates", "= redirect_url.encode('ascii') # Redirect the user after performing the action.", "DASHBOARD_HTTPS_PORT) redirect_url = '{0}?{1}={2}'.format(appscale_login_url, CONTINUE_PARAM, continue_url) self.response.status = 302 self.response.status_message", "} return _LOGIN_TEMPLATE % template_dict def login_redirect(application_url, continue_url, start_response): \"\"\"Writes", "front-end server, regardless of the host:port the user connected to.", "= 'login' # Name of the cookie that stores the", "value for setting the user info of the requestor. \"\"\"", "an admin; False otherwise. Returns: A string containing the contents", "cookie_dict = dict((k, v.value) for k, v in cookie.iteritems()) return", "Returns: A string containing the fake login secret \"\"\" return", "under the License. # \"\"\"Handles login/logout pages and dealing with", "else: user_id = '' return '%s:%s:%s' % (email, admin, user_id)", "margin: 1em auto; text-align:left; padding: 0 2em 1.25em 2em; background-color:", "or login_url # Perform the action. if action.lower() == LOGOUT_ACTION.lower():", "email, nickname, admin, hsh = (cookie_value.split(':') + ['', '', '',", "login and logout HTTP requests. It accepts these GET query", "import Cookie import hashlib import logging import os import sha", "from an HTTP Cookie header. Args: http_cookie: The value of", "cookie_value.replace(\"%40\",'@') cookie_value = cookie_value.replace(\"%2C\",\",\") email, nickname, admin, hsh = (cookie_value.split(':')", "_create_cookie_data(email, admin) cookie = Cookie.SimpleCookie() cookie[cookie_name] = cookie_value cookie[cookie_name]['path'] =", "\"License\"); # you may not use this file except in", "user info cookie from the requestor, logging them out. Args:", "the user connected to. 
Args: application_url: The URL of the", "for setting the user info of the requestor. \"\"\" cookie_value", "continue_url or login_url # Perform the action. if action.lower() ==", "to log in before letting them view the # specified", "= 302 self.response.status_message = 'Redirecting to continue URL' self.response.headers['Location'] =", "border: 2px solid #67a7e3\"> <h3>%(login_message)s</h3> <p style=\"padding: 0; margin: 0\">", "str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler): \"\"\"The request handler for the login and", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "1em auto; text-align:left; padding: 0 2em 1.25em 2em; background-color: #d6e9f8;", "action = self.request.get(ACTION_PARAM) set_email = self.request.get(_EMAIL_PARAM) set_admin = self.request.get(_ADMIN_PARAM).lower() ==", "License. # \"\"\"Handles login/logout pages and dealing with user cookies.", "cookie = Cookie.SimpleCookie(http_cookie) except Cookie.CookieError: return '', False, '' cookie_dict", "cookie[cookie_name].OutputString() _LOGIN_TEMPLATE = \"\"\"<html> <head> <title>Login</title> </head> <body> <form method=\"get\"", "Perform the action. if action.lower() == LOGOUT_ACTION.lower(): self.response.headers['Set-Cookie'] = _clear_user_info_cookie()", "the cookie payload. \"\"\" if email: user_id_digest = hashlib.md5(email.lower()).digest() user_id", "= redirect_url else: # Send the user to the AppDashboard", "the requestor. \"\"\" cookie_value = _create_cookie_data(email, admin) cookie = Cookie.SimpleCookie()", "admin) cookie = Cookie.SimpleCookie() cookie[cookie_name] = cookie_value cookie[cookie_name]['path'] = '/'", "user_id = '1' + ''.join(['%02d' % ord(x) for x in", "<form method=\"get\" action=\"%(login_url)s\" style=\"text-align:center; font: 13px sans-serif\"> <div style=\"width: 20em;", "to set for the user. 
admin: True if the user", "value=\"Logout\" type=\"submit\" id=\"submit-logout\" /> </p> </div> <input name=\"continue\" type=\"hidden\" value=\"%(continue_url)s\"/>", "= 'checked' if admin else '' template_dict = { 'email':", "return cookie[cookie_name].OutputString() def _clear_user_info_cookie(cookie_name=_COOKIE_NAME): \"\"\"Clears the user info cookie from", "# distributed under the License is distributed on an \"AS", "= '/' cookie[cookie_name]['max-age'] = '0' if AppDashboardHelper.USE_SHIBBOLETH: cookie[cookie_name]['domain'] = AppDashboardHelper.\\", "continue_url. The login_url should be on the canonical front-end server,", "style=\"margin: .5em 0 0 3em; font-size:12px\"> <input name=\"admin\" type=\"checkbox\" value=\"True\"", "str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler): \"\"\"The request handler for the", "action: redirect_url = continue_url or login_url # Perform the action.", "URL' self.response.headers['Location'] = redirect_url else: # Send the user to", "# Unless required by applicable law or agreed to in", "header. Args: http_cookie: The value of the 'Cookie' HTTP request", "to set for the client. admin: If 'True', the client", "hostname = os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url))", "admin, cookie_name=_COOKIE_NAME): \"\"\"Creates a cookie to set the user information", "current user, if any. admin: True if the user is", "user is currently an admin; False otherwise. Returns: A string", "user_id = '' return '%s:%s:%s' % (email, admin, user_id) def", "after a login or logout has completed. email: Email address", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "Send the user to the AppDashboard to log in before", "that stores the user info. 
_COOKIE_NAME = 'dev_appserver_login' # Indicates", "CONTINUE_PARAM, urllib.quote(continue_url) ) else: hostname = os.environ['NGINX_HOST'] redirect_url = 'https://{0}:{1}/login?{2}={3}'.format(", "solid #67a7e3\"> <h3>%(login_message)s</h3> <p style=\"padding: 0; margin: 0\"> <label for=\"email\"", "nickname def _create_cookie_data(email, admin): \"\"\"Creates cookie payload data. Args: email:", "otherwise. cookie_name: The name of the cookie that stores the", "for clearing the user info of the requestor. \"\"\" cookie", "The user's email address, if any. admin: True if the", "value=\"%(email)s\" id=\"email\"/> </p> <p style=\"margin: .5em 0 0 3em; font-size:12px\">", "You may obtain a copy of the License at #", "# # Copyright 2007 Google Inc. # # Licensed under", "user_id_digest = hashlib.md5(email.lower()).digest() user_id = '1' + ''.join(['%02d' % ord(x)", "user logs in. start_response: A WSGI start_response function. Returns: An", "requestor. \"\"\" cookie_value = _create_cookie_data(email, admin) cookie = Cookie.SimpleCookie() cookie[cookie_name]", "hashlib.sha1('{}/{}'.format( os.environ.get('APPNAME', str(uuid.uuid4())), os.environ.get('COOKIE_SECRET', str(uuid.uuid4())))).hexdigest() class Handler(webapp2.RequestHandler): \"\"\"The request handler", "template_dict def login_redirect(application_url, continue_url, start_response): \"\"\"Writes a login redirection URL", "2007 Google Inc. # # Licensed under the Apache License,", "('Login' or 'Logout'). To view the current user information and", "(email, admin, user_id) def _set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME): \"\"\"Creates a cookie", "'CLOUD_ADMIN' # The port that the AppDashboard serves HTTPS traffic", "True if the user is an admin; False otherwise. 
Returns:", "the login page and handles login and logout HTTP requests.", "admin, user_id) where: email: The user's email address, if any.", "_set_user_info_cookie(email, admin, cookie_name=_COOKIE_NAME): \"\"\"Creates a cookie to set the user", "page. Args: login_url: The parameter to _login_response. continue_url: The parameter", "(empty) iterable over strings containing the body of the HTTP", "the Apache License, Version 2.0 (the \"License\"); # you may", "otherwise. user_id: The user ID, if any. \"\"\" cookie_secret =", "the user logs in. start_response: A WSGI start_response function. Returns:", "access to all applications. CLOUD_ADMIN_MARKER = 'CLOUD_ADMIN' # The port", "info from an HTTP Cookie header. Args: http_cookie: The value", "redirect_url = 'https://{0}:{1}/login?{2}={3}'.format( hostname, DASHBOARD_HTTPS_PORT, CONTINUE_PARAM, urllib.quote(continue_url)) start_response('302 Requires login',", "if AppDashboardHelper.USE_SHIBBOLETH: redirect_url = AppDashboardHelper.SHIBBOLETH_LOGOUT_URL elif action.lower() == LOGIN_ACTION.lower() and", "action: What action to take ('Login' or 'Logout'). To view", "#!/usr/bin/env python # # Copyright 2007 Google Inc. # #", "if hsh != vhsh: logging.info(\"{0} has an invalid cookie, so", "# specified URL. if AppDashboardHelper.USE_SHIBBOLETH: appscale_login_url = \"{0}:{1}/login\".format( AppDashboardHelper.SHIBBOLETH_CONNECTOR, DASHBOARD_HTTPS_PORT)", "GET query parameters: continue: URL to redirect to after a", "# limitations under the License. # \"\"\"Handles login/logout pages and", "import sha import sys import urllib import uuid import webapp2", "\"\"\"Clears the user info cookie from the requestor, logging them" ]
[ "endpoint, timeout=None): self._endpoint = endpoint self._timeout = timeout def start(self):", "be aware that they run Docker-on-Mac # to connect against", "import tempfile import threading import grpc from apache_beam.portability.api import beam_job_api_pb2_grpc", "self._timeout = timeout def start(self): channel = grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return", "connect against the internal Docker-for-Mac address. cmd += [\"-e\", \"DOCKER_MAC_CONTAINER=1\"]", "be set for MacOS because it detroys port forwardings, #", "the License. # from __future__ import absolute_import import atexit import", "for local execution. \"\"\" def __init__(self, job_host=\"localhost\", job_port=None, artifact_port=None, expansion_port=None,", "2.0 # (the \"License\"); you may not use this file", "from __future__ import absolute_import import atexit import os import shutil", "import threading import grpc from apache_beam.portability.api import beam_job_api_pb2_grpc from apache_beam.runners.portability", "JobServer such that its stop will automatically be called on", "to connect against the internal Docker-for-Mac address. 
cmd += [\"-e\",", "endpoint self._timeout = timeout def start(self): channel = grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout)", "because it detroys port forwardings, # even though host networking", "shutil import signal import subprocess import sys import tempfile import", "NotImplementedError(type(self)) def stop(self): \"\"\"Stops this job server.\"\"\" raise NotImplementedError(type(self)) class", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "artifacts_dir)), 'localhost:%s' % job_port) class DockerizedJobServer(SubprocessJobServer): \"\"\" Spins up the", "self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint = self.subprocess_cmd_and_endpoint() port = int(endpoint.split(':')[-1])", "return subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self): jar_path = self.local_jar(self.path_to_jar()) artifacts_dir = self.local_temp_dir(prefix='artifacts')", "the SDK harness. \"-v\", ':'.join([docker_path, \"/bin/docker\"]), \"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port,", "def __init__(self): self._local_temp_root = None self._server = None def subprocess_cmd_and_endpoint(self):", "import beam_job_api_pb2_grpc from apache_beam.runners.portability import local_job_service from apache_beam.utils import subprocess_server", "apache_beam.version import __version__ as beam_version class JobServer(object): def start(self): \"\"\"Starts", "JobServer, returning a grpc service to which to submit jobs.", "a docker container for local execution. \"\"\" def __init__(self, job_host=\"localhost\",", "it. # Also, all other containers need to be aware", "local_job_service from apache_beam.utils import subprocess_server from apache_beam.version import __version__ as", "containers for the SDK harness. 
\"-v\", ':'.join([docker_path, \"/bin/docker\"]), \"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"]", "subprocess_server.pick_port( self.job_port, self.artifact_port, self.expansion_port)) args = ['--job-host', self.job_host, '--job-port', str(self.job_port),", "that they run Docker-on-Mac # to connect against the internal", "\"{}:{}\".format(self.job_port, self.job_port)] cmd += [\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd += [\"-p\",", "job_host=\"localhost\", job_port=None, artifact_port=None, expansion_port=None, harness_port_range=(8100, 8200), max_connection_retries=5): super(DockerizedJobServer, self).__init__() self.job_host", "job_server_image_name = os.environ['USER'] + \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path = subprocess.check_output( ['which',", "harness_port_range=(8100, 8200), max_connection_retries=5): super(DockerizedJobServer, self).__init__() self.job_host = job_host self.job_port =", "def stop(self): with self._lock: if self._started: self._job_server.stop() self._started = False", "raise NotImplementedError(type(self)) def path_to_jar(self): raise NotImplementedError(type(self)) @staticmethod def path_to_beam_jar(gradle_target): return", "os.environ['USER'] + \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path = subprocess.check_output( ['which', 'docker']).strip().decode('utf-8') cmd", "job_host self.job_port = job_port self.expansion_port = expansion_port self.artifact_port = artifact_port", "\"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd += [\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])] else: #", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "spin up # \"sibling\" containers for the SDK harness. 
\"-v\",", "self._local_temp_root = None self._server = None def subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self))", "self._server.start() def stop(self): if self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root = None return", "[\"docker\", \"run\", # We mount the docker binary and socket", "return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def local_jar(url): return subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self): jar_path", "self._server is None: self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint = self.subprocess_cmd_and_endpoint()", "connect to it. # Also, all other containers need to", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "distributed with # this work for additional information regarding copyright", "# Docker-for-Mac doesn't support host networking, so we need to", "information regarding copyright ownership. # The ASF licenses this file", "shouldn't be set for MacOS because it detroys port forwardings,", "return self._endpoint def stop(self): with self._lock: if self._started: self._job_server.stop() self._started", "class JobServer(object): def start(self): \"\"\"Starts this JobServer, returning a grpc", "from the Docker container to be able to connect to", "set for MacOS because it detroys port forwardings, # even", "absolute_import import atexit import os import shutil import signal import", "with self._lock: if not self._started: self._endpoint = self._job_server.start() self._started =", "and # limitations under the License. # from __future__ import", "to be able to connect to it. 
# Also, all", "other containers need to be aware that they run Docker-on-Mac", "\"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path = subprocess.check_output( ['which', 'docker']).strip().decode('utf-8') cmd = [\"docker\", \"run\",", "ports from the Docker container to be able to connect", "for JobServers run as an external process.\"\"\" def __init__(self): self._local_temp_root", "self._started = False class SubprocessJobServer(JobServer): \"\"\"An abstract base class for", "stop(self): with self._lock: if self._started: self._job_server.stop() self._started = False class", "ownership. # The ASF licenses this file to You under", "args = ['--job-host', self.job_host, '--job-port', str(self.job_port), '--artifact-port', str(self.artifact_port), '--expansion-port', str(self.expansion_port)]", "Also, all other containers need to be aware that they", "software # distributed under the License is distributed on an", "with # this work for additional information regarding copyright ownership.", "= grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self): pass class EmbeddedJobServer(JobServer):", "stop(self): \"\"\"Stops this job server.\"\"\" raise NotImplementedError(type(self)) class ExternalJobServer(JobServer): def", "binary and socket to be able to spin up #", "such that its stop will automatically be called on exit.", "abstract base class for JobServers run as an external process.\"\"\"", "beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port) return self._server.start() def stop(self): if self._local_temp_root: shutil.rmtree(self._local_temp_root)", "cmd.append(\"--network=host\") cmd.append(job_server_image_name) return cmd + args, '%s:%s' % (self.job_host, self.job_port)", "compliance with # the License. 
You may obtain a copy", "channel = grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self): pass class", "licenses this file to You under the Apache License, Version", "def subprocess_cmd_and_endpoint(self): # TODO This is hardcoded to Flink at", "if self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root = None return self._server.stop() def local_temp_dir(self,", "License. # from __future__ import absolute_import import atexit import os", "self._started = False def start(self): with self._lock: if not self._started:", "able to spin up # \"sibling\" containers for the SDK", "# This shouldn't be set for MacOS because it detroys", "# The ASF licenses this file to You under the", "def local_jar(url): return subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self): jar_path = self.local_jar(self.path_to_jar()) artifacts_dir", "container to be able to connect to it. # Also,", "subprocess import sys import tempfile import threading import grpc from", "TODO This is hardcoded to Flink at the moment but", "if sys.platform == \"darwin\": # Docker-for-Mac doesn't support host networking,", "\"\"\"Stops this job server.\"\"\" raise NotImplementedError(type(self)) class ExternalJobServer(JobServer): def __init__(self,", "JobServer in a docker container for local execution. \"\"\" def", "subprocess.check_output( ['which', 'docker']).strip().decode('utf-8') cmd = [\"docker\", \"run\", # We mount", "This is hardcoded to Flink at the moment but should", "@staticmethod def local_jar(url): return subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self): jar_path = self.local_jar(self.path_to_jar())", "OF ANY KIND, either express or implied. 
# See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "self._endpoint = self._job_server.start() self._started = True atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop) return", "ANY KIND, either express or implied. # See the License", "(ASF) under one or more # contributor license agreements. See", "use this file except in compliance with # the License.", "See the License for the specific language governing permissions and", "cmd, port=port) return self._server.start() def stop(self): if self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root", "= None self._server = None def subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self)) def", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "import subprocess import sys import tempfile import threading import grpc", "to in writing, software # distributed under the License is", "changed job_server_image_name = os.environ['USER'] + \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path = subprocess.check_output(", "self._job_server.stop() self._started = False class SubprocessJobServer(JobServer): \"\"\"An abstract base class", "# See the License for the specific language governing permissions", "__init__(self): self._local_temp_root = None self._server = None def subprocess_cmd_and_endpoint(self): raise", "= False def start(self): with self._lock: if not self._started: self._endpoint", "# from __future__ import absolute_import import atexit import os import", "even though host networking is not supported on MacOS. cmd.append(\"--network=host\")", "This shouldn't be set for MacOS because it detroys port", "language governing permissions and # limitations under the License. #", "additional information regarding copyright ownership. 
# The ASF licenses this", "import sys import tempfile import threading import grpc from apache_beam.portability.api", "or agreed to in writing, software # distributed under the", "atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop) return self._endpoint def stop(self): with self._lock: if", "container for local execution. \"\"\" def __init__(self, job_host=\"localhost\", job_port=None, artifact_port=None,", "required by applicable law or agreed to in writing, software", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self): jar_path = self.local_jar(self.path_to_jar()) artifacts_dir = self.local_temp_dir(prefix='artifacts') job_port,", "def __init__(self, job_host=\"localhost\", job_port=None, artifact_port=None, expansion_port=None, harness_port_range=(8100, 8200), max_connection_retries=5): super(DockerizedJobServer,", "copyright ownership. # The ASF licenses this file to You", "return self._server.start() def stop(self): if self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root = None", "though host networking is not supported on MacOS. cmd.append(\"--network=host\") cmd.append(job_server_image_name)", "mount the docker binary and socket to be able to", "License, Version 2.0 # (the \"License\"); you may not use", "threading.Lock() self._job_server = job_server self._started = False def start(self): with", "= job_server self._started = False def start(self): with self._lock: if", "not supported on MacOS. 
cmd.append(\"--network=host\") cmd.append(job_server_image_name) return cmd + args,", "def subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self)) def start(self): if self._server is None:", "may not use this file except in compliance with #", "SubprocessJobServer(JobServer): \"\"\"An abstract base class for JobServers run as an", "is hardcoded to Flink at the moment but should be", "publish ports from the Docker container to be able to", "agreed to in writing, software # distributed under the License", "execution. \"\"\" def __init__(self, job_host=\"localhost\", job_port=None, artifact_port=None, expansion_port=None, harness_port_range=(8100, 8200),", "to the Apache Software Foundation (ASF) under one or more", "raise NotImplementedError(type(self)) def start(self): if self._server is None: self._local_temp_root =", "distributed under the License is distributed on an \"AS IS\"", "class for JobServers run as an external process.\"\"\" def __init__(self):", "raise NotImplementedError(type(self)) def stop(self): \"\"\"Stops this job server.\"\"\" raise NotImplementedError(type(self))", "def path_to_jar(self): raise NotImplementedError(type(self)) @staticmethod def path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod", "detroys port forwardings, # even though host networking is not", "job_port, artifacts_dir): raise NotImplementedError(type(self)) def path_to_jar(self): raise NotImplementedError(type(self)) @staticmethod def", "path_to_jar(self): raise NotImplementedError(type(self)) @staticmethod def path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def", "if not self._started: self._endpoint = self._job_server.start() self._started = True atexit.register(self.stop)", "on exit. 
\"\"\" def __init__(self, job_server): self._lock = threading.Lock() self._job_server", "under the Apache License, Version 2.0 # (the \"License\"); you", "# the License. You may obtain a copy of the", "express or implied. # See the License for the specific", "jar_path = self.local_jar(self.path_to_jar()) artifacts_dir = self.local_temp_dir(prefix='artifacts') job_port, = subprocess_server.pick_port(None) return", "this work for additional information regarding copyright ownership. # The", "Licensed to the Apache Software Foundation (ASF) under one or", "job_port) class DockerizedJobServer(SubprocessJobServer): \"\"\" Spins up the JobServer in a", "run Docker-on-Mac # to connect against the internal Docker-for-Mac address.", "+= [\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd += [\"-p\", \"{}:{}\".format(self.job_port, self.job_port)] cmd +=", "self._endpoint = endpoint self._timeout = timeout def start(self): channel =", "permissions and # limitations under the License. # from __future__", "an external process.\"\"\" def __init__(self): self._local_temp_root = None self._server =", "the JobServer in a docker container for local execution. \"\"\"", "writing, software # distributed under the License is distributed on", "you may not use this file except in compliance with", "[\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd += [\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])] else:", "subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self)) def start(self): if self._server is None: self._local_temp_root", "+ list( self.java_arguments(job_port, artifacts_dir)), 'localhost:%s' % job_port) class DockerizedJobServer(SubprocessJobServer): \"\"\"", "= harness_port_range self.max_connection_retries = max_connection_retries def subprocess_cmd_and_endpoint(self): # TODO This", "supported on MacOS. 
cmd.append(\"--network=host\") cmd.append(job_server_image_name) return cmd + args, '%s:%s'", "= False class SubprocessJobServer(JobServer): \"\"\"An abstract base class for JobServers", "the License. You may obtain a copy of the License", "def stop(self): pass class EmbeddedJobServer(JobServer): def start(self): return local_job_service.LocalJobServicer() def", "its stop will automatically be called on exit. \"\"\" def", "timeout=None): self._endpoint = endpoint self._timeout = timeout def start(self): channel", "expansion_port=None, harness_port_range=(8100, 8200), max_connection_retries=5): super(DockerizedJobServer, self).__init__() self.job_host = job_host self.job_port", "CONDITIONS OF ANY KIND, either express or implied. # See", "Version 2.0 # (the \"License\"); you may not use this", "__version__ as beam_version class JobServer(object): def start(self): \"\"\"Starts this JobServer,", "import signal import subprocess import sys import tempfile import threading", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "to which to submit jobs. \"\"\" raise NotImplementedError(type(self)) def stop(self):", "to explictly # publish ports from the Docker container to", "MacOS. cmd.append(\"--network=host\") cmd.append(job_server_image_name) return cmd + args, '%s:%s' % (self.job_host,", "def java_arguments(self, job_port, artifacts_dir): raise NotImplementedError(type(self)) def path_to_jar(self): raise NotImplementedError(type(self))", "self._server.stop() def local_temp_dir(self, **kwargs): return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY", "limitations under the License. 
# from __future__ import absolute_import import", "self._server = None def subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self)) def start(self): if", "os import shutil import signal import subprocess import sys import", "tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint = self.subprocess_cmd_and_endpoint() port = int(endpoint.split(':')[-1]) self._server =", "port=port) return self._server.start() def stop(self): if self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root =", "= expansion_port self.artifact_port = artifact_port self.harness_port_range = harness_port_range self.max_connection_retries =", "subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def local_jar(url): return subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self): jar_path =", "[\"-p\", \"{}:{}\".format(self.job_port, self.job_port)] cmd += [\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd +=", "grpc from apache_beam.portability.api import beam_job_api_pb2_grpc from apache_beam.runners.portability import local_job_service from", "server.\"\"\" raise NotImplementedError(type(self)) class ExternalJobServer(JobServer): def __init__(self, endpoint, timeout=None): self._endpoint", "DockerizedJobServer(SubprocessJobServer): \"\"\" Spins up the JobServer in a docker container", "to be able to spin up # \"sibling\" containers for", "the moment but should be changed job_server_image_name = os.environ['USER'] +", "artifacts_dir = self.local_temp_dir(prefix='artifacts') job_port, = subprocess_server.pick_port(None) return ( ['java', '-jar',", "self.harness_port_range = harness_port_range self.max_connection_retries = max_connection_retries def subprocess_cmd_and_endpoint(self): # TODO", "self.java_arguments(job_port, artifacts_dir)), 'localhost:%s' % job_port) class DockerizedJobServer(SubprocessJobServer): \"\"\" Spins up", "(the \"License\"); you may not use this file except in", 
"port forwardings, # even though host networking is not supported", "= artifact_port self.harness_port_range = harness_port_range self.max_connection_retries = max_connection_retries def subprocess_cmd_and_endpoint(self):", "OR CONDITIONS OF ANY KIND, either express or implied. #", "else: # This shouldn't be set for MacOS because it", "self).__init__() self.job_host = job_host self.job_port = job_port self.expansion_port = expansion_port", "\"\"\" def __init__(self, job_server): self._lock = threading.Lock() self._job_server = job_server", "return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE =", "'--expansion-port', str(self.expansion_port)] if sys.platform == \"darwin\": # Docker-for-Mac doesn't support", "the License is distributed on an \"AS IS\" BASIS, #", "self.artifact_port)] cmd += [\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd += [\"-p\", \"{0}-{1}:{0}-{1}\".format(", "def stop(self): if self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root = None return self._server.stop()", "cmd += [\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd += [\"-p\", \"{}:{}\".format(self.job_port, self.job_port)] cmd", "self.stop) return self._endpoint def stop(self): with self._lock: if self._started: self._job_server.stop()", "class ExternalJobServer(JobServer): def __init__(self, endpoint, timeout=None): self._endpoint = endpoint self._timeout", "under the License. 
# from __future__ import absolute_import import atexit", "\"\"\"Starts this JobServer, returning a grpc service to which to", "= subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port) return self._server.start() def stop(self): if", "support host networking, so we need to explictly # publish", "\"darwin\": # Docker-for-Mac doesn't support host networking, so we need", "+= [\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd += [\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd", "self.job_port)] cmd += [\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd += [\"-p\", \"{}:{}\".format(self.expansion_port,", "= self.local_temp_dir(prefix='artifacts') job_port, = subprocess_server.pick_port(None) return ( ['java', '-jar', jar_path]", "JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self, job_port,", "self.job_host, '--job-port', str(self.job_port), '--artifact-port', str(self.artifact_port), '--expansion-port', str(self.expansion_port)] if sys.platform ==", "more # contributor license agreements. See the NOTICE file distributed", "def __init__(self, endpoint, timeout=None): self._endpoint = endpoint self._timeout = timeout", "start(self): channel = grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self): pass", "law or agreed to in writing, software # distributed under", "start(self): with self._lock: if not self._started: self._endpoint = self._job_server.start() self._started", "self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root = None return self._server.stop() def local_temp_dir(self, **kwargs):", "be called on exit. 
\"\"\" def __init__(self, job_server): self._lock =", "Apache Software Foundation (ASF) under one or more # contributor", "host networking is not supported on MacOS. cmd.append(\"--network=host\") cmd.append(job_server_image_name) return", "import shutil import signal import subprocess import sys import tempfile", "'localhost:%s' % job_port) class DockerizedJobServer(SubprocessJobServer): \"\"\" Spins up the JobServer", "automatically be called on exit. \"\"\" def __init__(self, job_server): self._lock", "self.artifact_port = artifact_port self.harness_port_range = harness_port_range self.max_connection_retries = max_connection_retries def", "JobServer(object): def start(self): \"\"\"Starts this JobServer, returning a grpc service", "\"DOCKER_MAC_CONTAINER=1\"] cmd += [\"-p\", \"{}:{}\".format(self.job_port, self.job_port)] cmd += [\"-p\", \"{}:{}\".format(self.artifact_port,", "% job_port) class DockerizedJobServer(SubprocessJobServer): \"\"\" Spins up the JobServer in", "str(self.expansion_port)] if sys.platform == \"darwin\": # Docker-for-Mac doesn't support host", "def stop(self): pass class StopOnExitJobServer(JobServer): \"\"\"Wraps a JobServer such that", "local_jar(url): return subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self): jar_path = self.local_jar(self.path_to_jar()) artifacts_dir =", "['java', '-jar', jar_path] + list( self.java_arguments(job_port, artifacts_dir)), 'localhost:%s' % job_port)", "= ( subprocess_server.pick_port( self.job_port, self.artifact_port, self.expansion_port)) args = ['--job-host', self.job_host,", "the NOTICE file distributed with # this work for additional", "int(endpoint.split(':')[-1]) self._server = subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port) return self._server.start() def", "\"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])] else: # This shouldn't be set for", 
"grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self): pass class EmbeddedJobServer(JobServer): def start(self):", "= None def subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self)) def start(self): if self._server", "False class SubprocessJobServer(JobServer): \"\"\"An abstract base class for JobServers run", "may obtain a copy of the License at # #", "cmd += [\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd += [\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)]", "the Apache License, Version 2.0 # (the \"License\"); you may", "None return self._server.stop() def local_temp_dir(self, **kwargs): return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class", "def start(self): with self._lock: if not self._started: self._endpoint = self._job_server.start()", "job_server): self._lock = threading.Lock() self._job_server = job_server self._started = False", "# (the \"License\"); you may not use this file except", "apache_beam.runners.portability import local_job_service from apache_beam.utils import subprocess_server from apache_beam.version import", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "import local_job_service from apache_beam.utils import subprocess_server from apache_beam.version import __version__", "service to which to submit jobs. 
\"\"\" raise NotImplementedError(type(self)) def", "( ['java', '-jar', jar_path] + list( self.java_arguments(job_port, artifacts_dir)), 'localhost:%s' %", "self.job_port = job_port self.expansion_port = expansion_port self.artifact_port = artifact_port self.harness_port_range", "self._started: self._job_server.stop() self._started = False class SubprocessJobServer(JobServer): \"\"\"An abstract base", "+ \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path = subprocess.check_output( ['which', 'docker']).strip().decode('utf-8') cmd =", "# Licensed to the Apache Software Foundation (ASF) under one", "this job server.\"\"\" raise NotImplementedError(type(self)) class ExternalJobServer(JobServer): def __init__(self, endpoint,", "forwardings, # even though host networking is not supported on", "to spin up # \"sibling\" containers for the SDK harness.", "= self.subprocess_cmd_and_endpoint() port = int(endpoint.split(':')[-1]) self._server = subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd,", "in compliance with # the License. You may obtain a", "os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self, job_port, artifacts_dir): raise NotImplementedError(type(self)) def path_to_jar(self): raise", "networking, so we need to explictly # publish ports from", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= job_host self.job_port = job_port self.expansion_port = expansion_port self.artifact_port =", "stop(self): if self._local_temp_root: shutil.rmtree(self._local_temp_root) self._local_temp_root = None return self._server.stop() def", "license agreements. 
See the NOTICE file distributed with # this", "from apache_beam.portability.api import beam_job_api_pb2_grpc from apache_beam.runners.portability import local_job_service from apache_beam.utils", "if self._server is None: self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint =", "None def subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self)) def start(self): if self._server is", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "which to submit jobs. \"\"\" raise NotImplementedError(type(self)) def stop(self): \"\"\"Stops", "contributor license agreements. See the NOTICE file distributed with #", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "stop will automatically be called on exit. \"\"\" def __init__(self,", "= timeout def start(self): channel = grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel)", "for the SDK harness. 
\"-v\", ':'.join([docker_path, \"/bin/docker\"]), \"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port,", "NotImplementedError(type(self)) class ExternalJobServer(JobServer): def __init__(self, endpoint, timeout=None): self._endpoint = endpoint", "docker binary and socket to be able to spin up", "class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self,", "**kwargs) class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\") def", "= 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self, job_port, artifacts_dir): raise", "\"\"\"Wraps a JobServer such that its stop will automatically be", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "self.subprocess_cmd_and_endpoint() port = int(endpoint.split(':')[-1]) self._server = subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port)", "from apache_beam.utils import subprocess_server from apache_beam.version import __version__ as beam_version", "'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self, job_port, artifacts_dir): raise NotImplementedError(type(self))", "import __version__ as beam_version class JobServer(object): def start(self): \"\"\"Starts this", "__init__(self, endpoint, timeout=None): self._endpoint = endpoint self._timeout = timeout def", "file except in compliance with # the License. 
You may", "apache_beam.portability.api import beam_job_api_pb2_grpc from apache_beam.runners.portability import local_job_service from apache_beam.utils import", "\"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port, self.expansion_port = ( subprocess_server.pick_port( self.job_port, self.artifact_port,", "import os import shutil import signal import subprocess import sys", "this file except in compliance with # the License. You", "java_arguments(self, job_port, artifacts_dir): raise NotImplementedError(type(self)) def path_to_jar(self): raise NotImplementedError(type(self)) @staticmethod", "and socket to be able to spin up # \"sibling\"", "as an external process.\"\"\" def __init__(self): self._local_temp_root = None self._server", "['which', 'docker']).strip().decode('utf-8') cmd = [\"docker\", \"run\", # We mount the", "SDK harness. \"-v\", ':'.join([docker_path, \"/bin/docker\"]), \"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port, self.expansion_port", "a grpc service to which to submit jobs. 
\"\"\" raise", "import grpc from apache_beam.portability.api import beam_job_api_pb2_grpc from apache_beam.runners.portability import local_job_service", "def start(self): channel = grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self):", "= subprocess_server.pick_port(None) return ( ['java', '-jar', jar_path] + list( self.java_arguments(job_port,", "\"/bin/docker\"]), \"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port, self.expansion_port = ( subprocess_server.pick_port( self.job_port,", "pass class StopOnExitJobServer(JobServer): \"\"\"Wraps a JobServer such that its stop", "Apache License, Version 2.0 # (the \"License\"); you may not", "StopOnExitJobServer(JobServer): \"\"\"Wraps a JobServer such that its stop will automatically", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "this JobServer, returning a grpc service to which to submit", "return ( ['java', '-jar', jar_path] + list( self.java_arguments(job_port, artifacts_dir)), 'localhost:%s'", "self._local_temp_root = None return self._server.stop() def local_temp_dir(self, **kwargs): return tempfile.mkdtemp(dir=self._local_temp_root,", "external process.\"\"\" def __init__(self): self._local_temp_root = None self._server = None", "address. cmd += [\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd += [\"-p\", \"{}:{}\".format(self.job_port, self.job_port)]", "def start(self): if self._server is None: self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp') cmd,", "\"\"\" raise NotImplementedError(type(self)) def stop(self): \"\"\"Stops this job server.\"\"\" raise", "\"\"\" Spins up the JobServer in a docker container for", "max_connection_retries=5): super(DockerizedJobServer, self).__init__() self.job_host = job_host self.job_port = job_port self.expansion_port", "or implied. 
# See the License for the specific language", "self.job_port, self.artifact_port, self.expansion_port)) args = ['--job-host', self.job_host, '--job-port', str(self.job_port), '--artifact-port',", "the docker binary and socket to be able to spin", "'--job-port', str(self.job_port), '--artifact-port', str(self.artifact_port), '--expansion-port', str(self.expansion_port)] if sys.platform == \"darwin\":", "submit jobs. \"\"\" raise NotImplementedError(type(self)) def stop(self): \"\"\"Stops this job", "containers need to be aware that they run Docker-on-Mac #", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "self.local_jar(self.path_to_jar()) artifacts_dir = self.local_temp_dir(prefix='artifacts') job_port, = subprocess_server.pick_port(None) return ( ['java',", "docker_path = subprocess.check_output( ['which', 'docker']).strip().decode('utf-8') cmd = [\"docker\", \"run\", #", "\"License\"); you may not use this file except in compliance", "except in compliance with # the License. You may obtain", "self.max_connection_retries = max_connection_retries def subprocess_cmd_and_endpoint(self): # TODO This is hardcoded", "subprocess_server from apache_beam.version import __version__ as beam_version class JobServer(object): def", "MacOS because it detroys port forwardings, # even though host", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "file to You under the Apache License, Version 2.0 #", "artifact_port self.harness_port_range = harness_port_range self.max_connection_retries = max_connection_retries def subprocess_cmd_and_endpoint(self): #", "import absolute_import import atexit import os import shutil import signal", "\"sibling\" containers for the SDK harness. 
\"-v\", ':'.join([docker_path, \"/bin/docker\"]), \"-v\",", "beam_version class JobServer(object): def start(self): \"\"\"Starts this JobServer, returning a", "__future__ import absolute_import import atexit import os import shutil import", "against the internal Docker-for-Mac address. cmd += [\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd", "# \"sibling\" containers for the SDK harness. \"-v\", ':'.join([docker_path, \"/bin/docker\"]),", "# to connect against the internal Docker-for-Mac address. cmd +=", "'--artifact-port', str(self.artifact_port), '--expansion-port', str(self.expansion_port)] if sys.platform == \"darwin\": # Docker-for-Mac", "regarding copyright ownership. # The ASF licenses this file to", "NotImplementedError(type(self)) def start(self): if self._server is None: self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp')", "under one or more # contributor license agreements. See the", "start(self): \"\"\"Starts this JobServer, returning a grpc service to which", "for MacOS because it detroys port forwardings, # even though", "it detroys port forwardings, # even though host networking is", "NOTICE file distributed with # this work for additional information", "Docker-on-Mac # to connect against the internal Docker-for-Mac address. 
cmd", "beam_job_api_pb2_grpc from apache_beam.runners.portability import local_job_service from apache_beam.utils import subprocess_server from", "__init__(self, job_server): self._lock = threading.Lock() self._job_server = job_server self._started =", "be changed job_server_image_name = os.environ['USER'] + \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path =", "stop(self): pass class EmbeddedJobServer(JobServer): def start(self): return local_job_service.LocalJobServicer() def stop(self):", "explictly # publish ports from the Docker container to be", "up the JobServer in a docker container for local execution.", "== \"darwin\": # Docker-for-Mac doesn't support host networking, so we", "signal import subprocess import sys import tempfile import threading import", "# even though host networking is not supported on MacOS.", "base class for JobServers run as an external process.\"\"\" def", "# # Unless required by applicable law or agreed to", "return beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self): pass class EmbeddedJobServer(JobServer): def start(self): return", "timeout def start(self): channel = grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel) def", "self.local_temp_dir(prefix='artifacts') job_port, = subprocess_server.pick_port(None) return ( ['java', '-jar', jar_path] +", "cmd += [\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])] else: # This shouldn't", "= os.environ['USER'] + \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path = subprocess.check_output( ['which', 'docker']).strip().decode('utf-8')", "docker container for local execution. 
\"\"\" def __init__(self, job_host=\"localhost\", job_port=None,", "cmd += [\"-p\", \"{}:{}\".format(self.job_port, self.job_port)] cmd += [\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)]", "__init__(self, job_host=\"localhost\", job_port=None, artifact_port=None, expansion_port=None, harness_port_range=(8100, 8200), max_connection_retries=5): super(DockerizedJobServer, self).__init__()", "on MacOS. cmd.append(\"--network=host\") cmd.append(job_server_image_name) return cmd + args, '%s:%s' %", "file distributed with # this work for additional information regarding", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "as beam_version class JobServer(object): def start(self): \"\"\"Starts this JobServer, returning", "sys.platform == \"darwin\": # Docker-for-Mac doesn't support host networking, so", "MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self, job_port, artifacts_dir):", "return local_job_service.LocalJobServicer() def stop(self): pass class StopOnExitJobServer(JobServer): \"\"\"Wraps a JobServer", "= subprocess.check_output( ['which', 'docker']).strip().decode('utf-8') cmd = [\"docker\", \"run\", # We", "networking is not supported on MacOS. cmd.append(\"--network=host\") cmd.append(job_server_image_name) return cmd", "need to be aware that they run Docker-on-Mac # to", "for additional information regarding copyright ownership. 
# The ASF licenses", "ASF licenses this file to You under the Apache License,", "cmd = [\"docker\", \"run\", # We mount the docker binary", "+= [\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd += [\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])]", "Software Foundation (ASF) under one or more # contributor license", "raise NotImplementedError(type(self)) @staticmethod def path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def local_jar(url):", "implied. # See the License for the specific language governing", "will automatically be called on exit. \"\"\" def __init__(self, job_server):", "= self.local_jar(self.path_to_jar()) artifacts_dir = self.local_temp_dir(prefix='artifacts') job_port, = subprocess_server.pick_port(None) return (", "\"-v\", ':'.join([docker_path, \"/bin/docker\"]), \"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port, self.expansion_port = (", "\"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port, self.expansion_port = ( subprocess_server.pick_port( self.job_port, self.artifact_port, self.expansion_port))", "signal.signal(signal.SIGINT, self.stop) return self._endpoint def stop(self): with self._lock: if self._started:", "\"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd += [\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd += [\"-p\",", "endpoint = self.subprocess_cmd_and_endpoint() port = int(endpoint.split(':')[-1]) self._server = subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub,", "self._lock = threading.Lock() self._job_server = job_server self._started = False def", "= max_connection_retries def subprocess_cmd_and_endpoint(self): # TODO This is hardcoded to", "by applicable law or agreed to in writing, software #", "# publish ports from the Docker container to be able", "not use this file 
except in compliance with # the", "is None: self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint = self.subprocess_cmd_and_endpoint() port", "jar_path] + list( self.java_arguments(job_port, artifacts_dir)), 'localhost:%s' % job_port) class DockerizedJobServer(SubprocessJobServer):", "called on exit. \"\"\" def __init__(self, job_server): self._lock = threading.Lock()", "def subprocess_cmd_and_endpoint(self): jar_path = self.local_jar(self.path_to_jar()) artifacts_dir = self.local_temp_dir(prefix='artifacts') job_port, =", "**kwargs): return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE", "beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self): pass class EmbeddedJobServer(JobServer): def start(self): return local_job_service.LocalJobServicer()", "\"\"\"An abstract base class for JobServers run as an external", "self.artifact_port, self.expansion_port)) args = ['--job-host', self.job_host, '--job-port', str(self.job_port), '--artifact-port', str(self.artifact_port),", "= ['--job-host', self.job_host, '--job-port', str(self.job_port), '--artifact-port', str(self.artifact_port), '--expansion-port', str(self.expansion_port)] if", "to it. # Also, all other containers need to be", "the Apache Software Foundation (ASF) under one or more #", "JobServers run as an external process.\"\"\" def __init__(self): self._local_temp_root =", "# limitations under the License. # from __future__ import absolute_import", "local_job_service.LocalJobServicer() def stop(self): pass class StopOnExitJobServer(JobServer): \"\"\"Wraps a JobServer such", "@staticmethod def path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def local_jar(url): return subprocess_server.JavaJarServer.local_jar(url)", "is not supported on MacOS. 
cmd.append(\"--network=host\") cmd.append(job_server_image_name) return cmd +", "str(self.artifact_port), '--expansion-port', str(self.expansion_port)] if sys.platform == \"darwin\": # Docker-for-Mac doesn't", "# # Licensed to the Apache Software Foundation (ASF) under", "self._endpoint def stop(self): with self._lock: if self._started: self._job_server.stop() self._started =", "to be aware that they run Docker-on-Mac # to connect", "jobs. \"\"\" raise NotImplementedError(type(self)) def stop(self): \"\"\"Stops this job server.\"\"\"", "local execution. \"\"\" def __init__(self, job_host=\"localhost\", job_port=None, artifact_port=None, expansion_port=None, harness_port_range=(8100,", "import atexit import os import shutil import signal import subprocess", "run as an external process.\"\"\" def __init__(self): self._local_temp_root = None", "'-jar', jar_path] + list( self.java_arguments(job_port, artifacts_dir)), 'localhost:%s' % job_port) class", "class StopOnExitJobServer(JobServer): \"\"\"Wraps a JobServer such that its stop will", "JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self, job_port, artifacts_dir): raise NotImplementedError(type(self)) def", "Docker container to be able to connect to it. #", "in a docker container for local execution. \"\"\" def __init__(self,", "to connect to it. # Also, all other containers need", "subprocess_server.pick_port(None) return ( ['java', '-jar', jar_path] + list( self.java_arguments(job_port, artifacts_dir)),", "False def start(self): with self._lock: if not self._started: self._endpoint =", "ExternalJobServer(JobServer): def __init__(self, endpoint, timeout=None): self._endpoint = endpoint self._timeout =", "up # \"sibling\" containers for the SDK harness. 
\"-v\", ':'.join([docker_path,", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "\"\"\" def __init__(self, job_host=\"localhost\", job_port=None, artifact_port=None, expansion_port=None, harness_port_range=(8100, 8200), max_connection_retries=5):", "Unless required by applicable law or agreed to in writing,", "def stop(self): \"\"\"Stops this job server.\"\"\" raise NotImplementedError(type(self)) class ExternalJobServer(JobServer):", "artifacts_dir): raise NotImplementedError(type(self)) def path_to_jar(self): raise NotImplementedError(type(self)) @staticmethod def path_to_beam_jar(gradle_target):", "apache_beam.utils import subprocess_server from apache_beam.version import __version__ as beam_version class", "super(DockerizedJobServer, self).__init__() self.job_host = job_host self.job_port = job_port self.expansion_port =", "class DockerizedJobServer(SubprocessJobServer): \"\"\" Spins up the JobServer in a docker", "at the moment but should be changed job_server_image_name = os.environ['USER']", "def start(self): return local_job_service.LocalJobServicer() def stop(self): pass class StopOnExitJobServer(JobServer): \"\"\"Wraps", "the specific language governing permissions and # limitations under the", "max_connection_retries def subprocess_cmd_and_endpoint(self): # TODO This is hardcoded to Flink", "Docker-for-Mac doesn't support host networking, so we need to explictly", "stop(self): pass class StopOnExitJobServer(JobServer): \"\"\"Wraps a JobServer such that its", "applicable law or agreed to in writing, software # distributed", "so we need to explictly # publish ports from the", "internal Docker-for-Mac address. 
cmd += [\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd += [\"-p\",", "not self._started: self._endpoint = self._job_server.start() self._started = True atexit.register(self.stop) signal.signal(signal.SIGINT,", "the Docker container to be able to connect to it.", "str(self.job_port), '--artifact-port', str(self.artifact_port), '--expansion-port', str(self.expansion_port)] if sys.platform == \"darwin\": #", "EmbeddedJobServer(JobServer): def start(self): return local_job_service.LocalJobServicer() def stop(self): pass class StopOnExitJobServer(JobServer):", "aware that they run Docker-on-Mac # to connect against the", "= threading.Lock() self._job_server = job_server self._started = False def start(self):", "We mount the docker binary and socket to be able", "# contributor license agreements. See the NOTICE file distributed with", "that its stop will automatically be called on exit. \"\"\"", "self._lock: if not self._started: self._endpoint = self._job_server.start() self._started = True", "None: self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint = self.subprocess_cmd_and_endpoint() port =", "# this work for additional information regarding copyright ownership. #", "in writing, software # distributed under the License is distributed", "with # the License. You may obtain a copy of", "socket to be able to spin up # \"sibling\" containers", "self.harness_port_range[0], self.harness_port_range[1])] else: # This shouldn't be set for MacOS", "exit. \"\"\" def __init__(self, job_server): self._lock = threading.Lock() self._job_server =", "to Flink at the moment but should be changed job_server_image_name", "the internal Docker-for-Mac address. 
cmd += [\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd +=", "subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port) return self._server.start() def stop(self): if self._local_temp_root:", "= endpoint self._timeout = timeout def start(self): channel = grpc.insecure_channel(self._endpoint)", "but should be changed job_server_image_name = os.environ['USER'] + \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\"", "this file to You under the Apache License, Version 2.0", "True atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop) return self._endpoint def stop(self): with self._lock:", "harness. \"-v\", ':'.join([docker_path, \"/bin/docker\"]), \"-v\", \"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port, self.expansion_port =", "Docker-for-Mac address. cmd += [\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd += [\"-p\", \"{}:{}\".format(self.job_port,", "+= [\"-p\", \"{}:{}\".format(self.job_port, self.job_port)] cmd += [\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd", "expansion_port self.artifact_port = artifact_port self.harness_port_range = harness_port_range self.max_connection_retries = max_connection_retries", "to submit jobs. 
\"\"\" raise NotImplementedError(type(self)) def stop(self): \"\"\"Stops this", "self.expansion_port = expansion_port self.artifact_port = artifact_port self.harness_port_range = harness_port_range self.max_connection_retries", "they run Docker-on-Mac # to connect against the internal Docker-for-Mac", "moment but should be changed job_server_image_name = os.environ['USER'] + \\", "= True atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop) return self._endpoint def stop(self): with", "tempfile import threading import grpc from apache_beam.portability.api import beam_job_api_pb2_grpc from", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "= int(endpoint.split(':')[-1]) self._server = subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port) return self._server.start()", "harness_port_range self.max_connection_retries = max_connection_retries def subprocess_cmd_and_endpoint(self): # TODO This is", "agreements. See the NOTICE file distributed with # this work", "class EmbeddedJobServer(JobServer): def start(self): return local_job_service.LocalJobServicer() def stop(self): pass class", "Foundation (ASF) under one or more # contributor license agreements.", "self.job_host = job_host self.job_port = job_port self.expansion_port = expansion_port self.artifact_port", "= None return self._server.stop() def local_temp_dir(self, **kwargs): return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs)", "= job_port self.expansion_port = expansion_port self.artifact_port = artifact_port self.harness_port_range =", "with self._lock: if self._started: self._job_server.stop() self._started = False class SubprocessJobServer(JobServer):", "subprocess_cmd_and_endpoint(self): jar_path = self.local_jar(self.path_to_jar()) artifacts_dir = self.local_temp_dir(prefix='artifacts') job_port, = subprocess_server.pick_port(None)", "self.job_port, self.artifact_port, self.expansion_port = ( subprocess_server.pick_port( self.job_port, 
self.artifact_port, self.expansion_port)) args", "[\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])] else: # This shouldn't be set", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "from apache_beam.version import __version__ as beam_version class JobServer(object): def start(self):", "# We mount the docker binary and socket to be", "be able to spin up # \"sibling\" containers for the", "def start(self): \"\"\"Starts this JobServer, returning a grpc service to", "returning a grpc service to which to submit jobs. \"\"\"", "self.expansion_port = ( subprocess_server.pick_port( self.job_port, self.artifact_port, self.expansion_port)) args = ['--job-host',", "artifact_port=None, expansion_port=None, harness_port_range=(8100, 8200), max_connection_retries=5): super(DockerizedJobServer, self).__init__() self.job_host = job_host", "port = int(endpoint.split(':')[-1]) self._server = subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port) return", "= [\"docker\", \"run\", # We mount the docker binary and", "need to explictly # publish ports from the Docker container", "hardcoded to Flink at the moment but should be changed", "subprocess_cmd_and_endpoint(self): # TODO This is hardcoded to Flink at the", "= tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint = self.subprocess_cmd_and_endpoint() port = int(endpoint.split(':')[-1]) self._server", "self._job_server.start() self._started = True atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop) return self._endpoint def", "should be changed job_server_image_name = os.environ['USER'] + \\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path", "the License for the specific language governing permissions and #", "self.harness_port_range[1])] else: # This shouldn't be set for MacOS because", "See the NOTICE file distributed with # this work for", "':'.join([docker_path, \"/bin/docker\"]), \"-v\", 
\"/var/run/docker.sock:/var/run/docker.sock\"] self.job_port, self.artifact_port, self.expansion_port = ( subprocess_server.pick_port(", "import subprocess_server from apache_beam.version import __version__ as beam_version class JobServer(object):", "either express or implied. # See the License for the", "self.expansion_port)] cmd += [\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])] else: # This", "NotImplementedError(type(self)) def path_to_jar(self): raise NotImplementedError(type(self)) @staticmethod def path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target)", "to You under the Apache License, Version 2.0 # (the", "( subprocess_server.pick_port( self.job_port, self.artifact_port, self.expansion_port)) args = ['--job-host', self.job_host, '--job-port',", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "raise NotImplementedError(type(self)) class ExternalJobServer(JobServer): def __init__(self, endpoint, timeout=None): self._endpoint =", "or more # contributor license agreements. See the NOTICE file", "governing permissions and # limitations under the License. 
# from", "def __init__(self, job_server): self._lock = threading.Lock() self._job_server = job_server self._started", "def local_temp_dir(self, **kwargs): return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY =", "self._server = subprocess_server.SubprocessServer( beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port) return self._server.start() def stop(self):", "You under the Apache License, Version 2.0 # (the \"License\");", "= self._job_server.start() self._started = True atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop) return self._endpoint", "\\ \"-docker-apache.bintray.io/beam/flink-job-server:latest\" docker_path = subprocess.check_output( ['which', 'docker']).strip().decode('utf-8') cmd = [\"docker\",", "shutil.rmtree(self._local_temp_root) self._local_temp_root = None return self._server.stop() def local_temp_dir(self, **kwargs): return", "[\"-e\", \"DOCKER_MAC_CONTAINER=1\"] cmd += [\"-p\", \"{}:{}\".format(self.job_port, self.job_port)] cmd += [\"-p\",", "job_port, = subprocess_server.pick_port(None) return ( ['java', '-jar', jar_path] + list(", "list( self.java_arguments(job_port, artifacts_dir)), 'localhost:%s' % job_port) class DockerizedJobServer(SubprocessJobServer): \"\"\" Spins", "start(self): if self._server is None: self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp') cmd, endpoint", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "None self._server = None def subprocess_cmd_and_endpoint(self): raise NotImplementedError(type(self)) def start(self):", "able to connect to it. 
# Also, all other containers", "self._started = True atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop) return self._endpoint def stop(self):", "class SubprocessJobServer(JobServer): \"\"\"An abstract base class for JobServers run as", "The ASF licenses this file to You under the Apache", "job server.\"\"\" raise NotImplementedError(type(self)) class ExternalJobServer(JobServer): def __init__(self, endpoint, timeout=None):", "\"run\", # We mount the docker binary and socket to", "threading import grpc from apache_beam.portability.api import beam_job_api_pb2_grpc from apache_beam.runners.portability import", "return self._server.stop() def local_temp_dir(self, **kwargs): return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class JavaJarJobServer(SubprocessJobServer):", "NotImplementedError(type(self)) @staticmethod def path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def local_jar(url): return", "Flink at the moment but should be changed job_server_image_name =", "process.\"\"\" def __init__(self): self._local_temp_root = None self._server = None def", "grpc.insecure_channel(self._endpoint) grpc.channel_ready_future(channel).result(timeout=self._timeout) return beam_job_api_pb2_grpc.JobServiceStub(channel) def stop(self): pass class EmbeddedJobServer(JobServer): def", "a JobServer such that its stop will automatically be called", "# Also, all other containers need to be aware that", "['--job-host', self.job_host, '--job-port', str(self.job_port), '--artifact-port', str(self.artifact_port), '--expansion-port', str(self.expansion_port)] if sys.platform", "'docker']).strip().decode('utf-8') cmd = [\"docker\", \"run\", # We mount the docker", "start(self): return local_job_service.LocalJobServicer() def stop(self): pass class StopOnExitJobServer(JobServer): \"\"\"Wraps a", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= 
os.path.expanduser(\"~/.apache_beam/cache\") def java_arguments(self, job_port, artifacts_dir): raise NotImplementedError(type(self)) def path_to_jar(self):", "local_temp_dir(self, **kwargs): return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam'", "host networking, so we need to explictly # publish ports", "tempfile.mkdtemp(dir=self._local_temp_root, **kwargs) class JavaJarJobServer(SubprocessJobServer): MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam' JAR_CACHE = os.path.expanduser(\"~/.apache_beam/cache\")", "self._lock: if self._started: self._job_server.stop() self._started = False class SubprocessJobServer(JobServer): \"\"\"An", "if self._started: self._job_server.stop() self._started = False class SubprocessJobServer(JobServer): \"\"\"An abstract", "# distributed under the License is distributed on an \"AS", "job_server self._started = False def start(self): with self._lock: if not", "# Unless required by applicable law or agreed to in", "be able to connect to it. # Also, all other", "8200), max_connection_retries=5): super(DockerizedJobServer, self).__init__() self.job_host = job_host self.job_port = job_port", "grpc service to which to submit jobs. \"\"\" raise NotImplementedError(type(self))", "self._started: self._endpoint = self._job_server.start() self._started = True atexit.register(self.stop) signal.signal(signal.SIGINT, self.stop)", "self.artifact_port, self.expansion_port = ( subprocess_server.pick_port( self.job_port, self.artifact_port, self.expansion_port)) args =", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "job_port self.expansion_port = expansion_port self.artifact_port = artifact_port self.harness_port_range = harness_port_range", "self._job_server = job_server self._started = False def start(self): with self._lock:", "License. 
You may obtain a copy of the License at", "Spins up the JobServer in a docker container for local", "You may obtain a copy of the License at #", "# TODO This is hardcoded to Flink at the moment", "def path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def local_jar(url): return subprocess_server.JavaJarServer.local_jar(url) def", "path_to_beam_jar(gradle_target): return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target) @staticmethod def local_jar(url): return subprocess_server.JavaJarServer.local_jar(url) def subprocess_cmd_and_endpoint(self):", "one or more # contributor license agreements. See the NOTICE", "work for additional information regarding copyright ownership. # The ASF", "we need to explictly # publish ports from the Docker", "from apache_beam.runners.portability import local_job_service from apache_beam.utils import subprocess_server from apache_beam.version", "[\"-p\", \"{}:{}\".format(self.artifact_port, self.artifact_port)] cmd += [\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd +=", "+= [\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0], self.harness_port_range[1])] else: # This shouldn't be", "cmd += [\"-p\", \"{}:{}\".format(self.expansion_port, self.expansion_port)] cmd += [\"-p\", \"{0}-{1}:{0}-{1}\".format( self.harness_port_range[0],", "atexit import os import shutil import signal import subprocess import", "job_port=None, artifact_port=None, expansion_port=None, harness_port_range=(8100, 8200), max_connection_retries=5): super(DockerizedJobServer, self).__init__() self.job_host =", "doesn't support host networking, so we need to explictly #", "sys import tempfile import threading import grpc from apache_beam.portability.api import", "all other containers need to be aware that they run", "cmd, endpoint = self.subprocess_cmd_and_endpoint() port = int(endpoint.split(':')[-1]) self._server = subprocess_server.SubprocessServer(", 
"pass class EmbeddedJobServer(JobServer): def start(self): return local_job_service.LocalJobServicer() def stop(self): pass", "self.expansion_port)) args = ['--job-host', self.job_host, '--job-port', str(self.job_port), '--artifact-port', str(self.artifact_port), '--expansion-port'," ]
[ "expr): func = self.known_functions.get(expr.__class__.__name__, None) if func is None: func", "c = arg.cond result.append('((') result.append(self._print(e)) result.append(') if (') result.append(self._print(c)) result.append(')", "def _print_Integer(self, e): return '%s(%d)' % (self._module_format('mpmath.mpf'), e) def _print_Float(self,", "{}).get( 'user_functions', {})) self.known_constants = dict(self._kc, **(settings or {}).get( 'user_constants',", "[(k, v) for k, v in _known_functions_math.items() if k not", "expr): return \"float('inf')\" def _print_Mod(self, expr): PREC = precedence(expr) return", "vectorized piecewise functions, logical operators, etc. \"\"\" printmethod = \"_numpycode\"", "result.append('((') result.append(self._print(e)) result.append(') if (') result.append(self._print(c)) result.append(') else (') i", "for k, v in _known_functions_mpmath.items()] )) def _print_Integer(self, e): return", "for k in PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func) for", "function return \"{0}({1}, {2}, {3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf')) def", "_print_MatMul(self, expr): \"Matrix multiplication printer\" return '({0})'.format(').dot('.join(self._print(i) for i in", "'is', 'lambda', 'not', 'or', 'pass', 'raise', 'return', 'try', 'while', 'with',", "% (func, self._print(expr.tolist())) _print_SparseMatrix = \\ _print_MutableSparseMatrix = \\ _print_ImmutableSparseMatrix", "v in _known_functions_scipy_special.items()] )) _kc = {k: 'scipy.constants.' + v", "_known_functions_scipy_special = { 'erf': 'erf', 'erfc': 'erfc', 'gamma': 'gamma', 'loggamma':", "name = expr.__class__.__name__ func = self.known_functions.get(name, name) return \"%s(%s)\" %", "e) def _print_Float(self, e): # XXX: This does not handle", "if k not in _not_in_numpy] _known_functions_numpy = dict(_in_numpy, **{ 'acos':", "tuples in nopython mode. 
return '({},)'.format(delimiter.join(self._print(item) for item in seq))", "return \"%s(%s)\" % (func, self._print(expr.tolist())) for k in NumPyPrinter._kf: setattr(NumPyPrinter,", "self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']: return fqn else: return fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self,", "def _print_arg(self, expr): return \"%s(%s)\" % (self._module_format('numpy.angle'), self._print(expr.args[0])) def _print_im(self,", "{'and': 'and', 'or': 'or', 'not': 'not'} _default_settings = dict( CodePrinter._default_settings,", "result = result[:-1] result.append(') else None)') result.append(')'*(2*i - 2)) return", "'atan': 'atan', 'atan2': 'atan2', 'atanh': 'atanh', 'ceiling': 'ceil', 'cos': 'cos',", "(self._module_format('numpy.imag', self._print(expr.args[0]))) def _print_Mod(self, expr): return \"%s(%s)\" % (self._module_format('numpy.mod'), ',", "self._print(arg1), self._print(arg2)) def _print_Piecewise(self, expr): \"Piecewise function printer\" exprs =", "it will behave the same as passing the 'default' kwarg", "_kw_only_py3 = {'False', 'nonlocal', 'True'} _known_functions = { 'Abs': 'abs',", "self.parenthesize(x, PREC), expr.args))) def _print_Piecewise(self, expr): result = [] i", "'log2': 'log2', 'sin': 'sin', 'sinh': 'sinh', 'Sqrt': 'sqrt', 'tan': 'tan',", "_print_Function(self, expr): mod = expr.func.__module__ or '' return '%s(%s)' %", "x 1. 
arg1, arg2 = expr.args if arg1.shape[0] != 1:", "'==' :'equal', '!=' :'not_equal', '<' :'less', '<=' :'less_equal', '>' :'greater',", "{ 'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif',", "any shape order, but numpy.dot does matrix # multiplication, so", "arg.expr c = arg.cond result.append('((') result.append(self._print(e)) result.append(') if (') result.append(self._print(c))", "k, v in _known_functions_math.items() if k not in _not_in_mpmath] _known_functions_mpmath", "_print_DenseMatrix = \\ _print_MutableDenseMatrix = \\ _print_ImmutableMatrix = \\ _print_ImmutableDenseMatrix", "{1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args))) def _print_Piecewise(self, expr): result =", "Equality and Unequality\" op = { '==' :'equal', '!=' :'not_equal',", "'expm1': 'expm1', 'factorial': 'factorial', 'floor': 'floor', 'gamma': 'gamma', 'hypot': 'hypot',", "return ''.join(result) def _print_ITE(self, expr): from sympy.functions.elementary.piecewise import Piecewise return", "= { 'Exp1': 'e', 'Pi': 'pi', # Only in python", "_default_settings = dict( CodePrinter._default_settings, user_functions={}, precision=17, inline=True, fully_qualified_modules=True ) def", "= expr.func.__module__ or '' return '%s(%s)' % (self._module_format(mod + ('.'", "return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) def _print_log1p(self, e): return '{0}({1}+1)'.format( self._module_format('mpmath.log'),", "is actually not in Python 2's keyword.kwlist } _kw_only_py2 =", "None # initialized to a set in __init__ tab =", "_print_Or(self, expr): \"Logical Or printer\" # We have to override", "python >= 3.5: # 'Infinity': 'inf', # 'NaN': 'nan' }", "or '' return '%s(%s)' % (self._module_format(mod + ('.' 
if mod", "(') i += 1 result = result[:-1] result.append(') else None)')", "shape order, but numpy.dot does matrix # multiplication, so we", "k in SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s' % k, _print_known_const) class SymPyPrinter(PythonCodePrinter):", "not the case, it may be triggered prematurely. return '{0}({1},", "is not the case, it may be triggered prematurely. return", "= None # initialized to a set in __init__ tab", "\" # {0}\".format(text) def _print_NaN(self, expr): return \"float('nan')\" def _print_Infinity(self,", "it, we could use StrPrinter's # version of the function", "_print_Piecewise(self, expr): result = [] i = 0 for arg", "expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in expr.args)) def _print_Max(self,", "'erfc': 'erfc', 'gamma': 'gamma', 'loggamma': 'gammaln' } _known_constants_scipy_constants = {", "'log': 'log', 'log10': 'log10', 'log1p': 'log1p', 'log2': 'log2', 'sin': 'sin',", "the uppergamma function return \"{0}({1}, {2}, {3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]),", "def __init__(self, settings=None): super(PythonCodePrinter, self).__init__(settings) self.module_imports = defaultdict(set) self.known_functions =", "handles vectorized piecewise functions, logical operators, etc. 
\"\"\" printmethod =", "arg1.T if arg2.shape[1] != 1: arg2 = arg2.T return \"%s(%s,", "'asinh': 'asinh', 'atan': 'atan', 'atan2': 'atan2', 'atanh': 'atanh', 'ceiling': 'ceil',", "_not_in_mpmath] _known_functions_mpmath = dict(_in_mpmath) _known_constants_mpmath = { 'Pi': 'pi' }", "= _print_SparseMatrix for k in SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s' % k,", "'{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) for k in MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s' %", "#printer for the uppergamma function return \"{0}({1}, {2}, {3})\".format( self._module_format('mpmath.gammainc'),", "logical operators, etc. \"\"\" printmethod = \"_numpycode\" _kf = dict(chain(", "for item in seq)) def _print_MatMul(self, expr): \"Matrix multiplication printer\"", "to a set in __init__ tab = ' ' _kf", "to make sure it gets 1 x n by n", "_print_ImmutableMatrix = \\ _print_ImmutableDenseMatrix = \\ lambda self, expr: self._print_MatrixBase(expr)", "[] i = 0 for arg in expr.args: e =", "'Infinity': 'inf', # 'NaN': 'nan' } def _print_known_func(self, expr): known", "expr.args)) conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args)) # If", "expr.args)) def _print_DotProduct(self, expr): # DotProduct allows any shape order,", "'print'} _kw_only_py3 = {'False', 'nonlocal', 'True'} _known_functions = { 'Abs':", "in _known_functions_scipy_special.items()] )) _kc = {k: 'scipy.constants.' 
+ v for", "def _print_SparseMatrix(self, expr): i, j, data = [], [], []", "behave the same as passing the 'default' kwarg to select()", "as passing the 'default' kwarg to select() # *as long", "_not_in_numpy] _known_functions_numpy = dict(_in_numpy, **{ 'acos': 'arccos', 'acosh': 'arccosh', 'asin':", "'floor': 'floor', 'gamma': 'gamma', 'hypot': 'hypot', 'loggamma': 'lgamma', 'log': 'log',", "\"%s = %s\" % (name, value) def _module_format(self, fqn, register=True):", "self._print(e.args[0])) def _print_log1p(self, e): return '{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) for k", "and add 'logical_or' to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i", "expr.args))) def _print_Piecewise(self, expr): result = [] i = 0", "% k, _print_known_const) class SymPyPrinter(PythonCodePrinter): _kf = dict([(k, 'sympy.' +", "k not in _not_in_mpmath] _known_functions_mpmath = dict(_in_mpmath) _known_constants_mpmath = {", "\"{0}({1}, 0, {2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1])) def _print_log2(self, e): return", "class NumPyPrinter(PythonCodePrinter): \"\"\" Numpy printer which handles vectorized piecewise functions,", "printer\" # We have to override LambdaPrinter because it uses", "for k, v in _known_constants_scipy_constants.items()} def _print_SparseMatrix(self, expr): i, j,", "def pycode(expr, **settings): return PythonCodePrinter(settings).doprint(expr) _not_in_mpmath = 'log1p log2'.split() _in_mpmath", "radians trunc fmod fsum gcd degrees fabs] _known_constants_math = {", "\"\"\" printmethod = \"_mpmathcode\" _kf = dict(chain( _known_functions.items(), [(k, 'mpmath.'", "= {k: 'scipy.constants.' + v for k, v in _known_constants_scipy_constants.items()}", "[(k, 'mpmath.' 
+ v) for k, v in _known_functions_mpmath.items()] ))", "= expr.__class__.__name__ func = self.known_functions.get(name, name) return \"%s(%s)\" % (func,", "'lgamma', 'log': 'log', 'log10': 'log10', 'log1p': 'log1p', 'log2': 'log2', 'sin':", "'[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args)) conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg", "[] for (r, c), v in expr._smat.items(): i.append(r) j.append(c) data.append(v)", "function will have set it to sufficient # precision to", "= { 'Pi': 'pi' } class MpmathPrinter(PythonCodePrinter): \"\"\" Lambda printer", "isclose isfinite isinf isnan ldexp frexp pow modf # radians", "str(tuple(map(int, e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def _print_uppergamma(self,e): #printer for the", "'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda',", "precision to match the Floats in the expression. # Remove", "k in _known_constants_mpmath: setattr(MpmathPrinter, '_print_%s' % k, _print_known_const) _not_in_numpy =", "_print_known_func) for k in SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s' % k, _print_known_const)", "\"Relational printer for Equality and Unequality\" op = { '=='", "k, _print_known_const) class SymPyPrinter(PythonCodePrinter): _kf = dict([(k, 'sympy.' + v)", "not in Python 2's keyword.kwlist } _kw_only_py2 = {'exec', 'print'}", "in range({a}, {b}+1)'.format( i=self._print(i), a=self._print(a), b=self._print(b)) for i, a, b", "numba supports # tuples in nopython mode. return '({},)'.format(delimiter.join(self._print(item) for", "it uses Python 'and' keyword. 
# If LambdaPrinter didn't define", "_print_known_const(self, expr): known = self.known_constants[expr.__class__.__name__] return self._module_format(known) class PythonCodePrinter(CodePrinter): printmethod", "v in _known_functions_numpy.items()] )) _kc = {k: 'numpy.'+v for k,", "_print_Mod(self, expr): return \"%s(%s)\" % (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args))) def", "'or', 'pass', 'raise', 'return', 'try', 'while', 'with', 'yield', 'None' #", "'gamma': 'gamma', 'hypot': 'hypot', 'loggamma': 'lgamma', 'log': 'log', 'log10': 'log10',", "def _print_known_func(self, expr): known = self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print,", "= self.known_functions.get(expr.__class__.__name__, None) if func is None: func = self._module_format('numpy.array')", "'({},)'.format(delimiter.join(self._print(item) for item in seq)) def _print_MatMul(self, expr): \"Matrix multiplication", "SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s' % k, _print_known_const) class SymPyPrinter(PythonCodePrinter): _kf =", "seq)) def _print_MatMul(self, expr): \"Matrix multiplication printer\" return '({0})'.format(').dot('.join(self._print(i) for", "return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs) def _print_Relational(self, expr): \"Relational", "'1j' def _print_MatrixBase(self, expr): name = expr.__class__.__name__ func = self.known_functions.get(name,", "'atan2': 'arctan2', 'atanh': 'arctanh', 'exp2': 'exp2', }) class NumPyPrinter(PythonCodePrinter): \"\"\"", "if (') result.append(self._print(c)) result.append(') else (') i += 1 result", "i in expr.args)) def _print_Min(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for", "setattr(NumPyPrinter, '_print_%s' % k, _print_known_func) for k in NumPyPrinter._kc: setattr(NumPyPrinter,", "'factorial': 'factorial', 'floor': 
'floor', 'gamma': 'gamma', 'hypot': 'hypot', 'loggamma': 'lgamma',", "LambdaPrinter didn't define it, we would still have to define", "# We have to override LambdaPrinter because it uses Python", "op: lhs = self._print(expr.lhs) rhs = self._print(expr.rhs) return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]),", "return '({0})'.format(').dot('.join(self._print(i) for i in expr.args)) def _print_DotProduct(self, expr): #", "import sympify from .precedence import precedence from .codeprinter import CodePrinter", "} _known_constants_scipy_constants = { 'GoldenRatio': 'golden_ratio' } class SciPyPrinter(NumPyPrinter): _kf", "defaultdict from functools import wraps from itertools import chain from", "CodePrinter _kw_py2and3 = { 'and', 'as', 'assert', 'break', 'class', 'continue',", "% k, _print_known_const) _not_in_numpy = 'erf erfc factorial gamma lgamma'.split()", "_print_known_func) for k in NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s' % k, _print_known_const)", "be triggered prematurely. 
return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs) def", "'cos', 'cosh': 'cosh', 'erf': 'erf', 'erfc': 'erfc', 'exp': 'exp', 'expm1':", "return \"%s(%s)\" % (self._module_format('numpy.real'), self._print(expr.args[0])) def _print_MatrixBase(self, expr): func =", "(func, self._print(expr.tolist())) _print_SparseMatrix = \\ _print_MutableSparseMatrix = \\ _print_ImmutableSparseMatrix =", "from sympy.functions.elementary.piecewise import Piecewise return self._print(expr.rewrite(Piecewise)) def _print_Sum(self, expr): loops", "'_print_%s' % k, _print_known_func) for k in _known_constants_math: setattr(PythonCodePrinter, '_print_%s'", "Or printer\" # We have to override LambdaPrinter because it", "'asin': 'arcsin', 'asinh': 'arcsinh', 'atan': 'arctan', 'atan2': 'arctan2', 'atanh': 'arctanh',", "self._module_format('mpmath.inf')) def _print_lowergamma(self,e): #printer for the lowergamma functioin return \"{0}({1},", "self._module_format(known) class PythonCodePrinter(CodePrinter): printmethod = \"_pythoncode\" language = \"Python\" standard", "= \\ _print_ImmutableMatrix = \\ _print_ImmutableDenseMatrix = \\ lambda self,", "'): \"General sequence printer: converts to tuple\" # Print tuples", "sympy.functions.elementary.piecewise import Piecewise return self._print(expr.rewrite(Piecewise)) def _print_Sum(self, expr): loops =", "have to make sure it gets 1 x n by", "keyword. # If LambdaPrinter didn't define it, we could use", "Lambda printer for mpmath which maintains precision for floats \"\"\"", "k, v in _known_functions_scipy_special.items()] )) _kc = {k: 'scipy.constants.' +", "StrPrinter doesn't define it. 
return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in", "precision for floats \"\"\" printmethod = \"_mpmathcode\" _kf = dict(chain(", "return '(builtins.sum({function} {loops}))'.format( function=self._print(expr.function), loops=' '.join(loops)) def _print_ImaginaryUnit(self, expr): return", "k, _print_known_func) for k in _known_constants_mpmath: setattr(MpmathPrinter, '_print_%s' % k,", "i in expr.args)) def _print_Not(self, expr): \"Logical Not printer\" #", "def _print_NaN(self, expr): return \"float('nan')\" def _print_Infinity(self, expr): return \"float('inf')\"", "self._print(expr.args[0])) def _print_im(self, expr): return \"%s(%s)\" % (self._module_format('numpy.imag', self._print(expr.args[0]))) def", "delimiter=', '): \"General sequence printer: converts to tuple\" # Print", "self.known_functions.get(name, name) return \"%s(%s)\" % (func, self._print(expr.tolist())) _print_SparseMatrix = \\", "'floor', 'gamma': 'gamma', 'hypot': 'hypot', 'loggamma': 'lgamma', 'log': 'log', 'log10':", "\\ _print_DenseMatrix = \\ _print_MutableDenseMatrix = \\ _print_ImmutableMatrix = \\", "'not': 'not'} _default_settings = dict( CodePrinter._default_settings, user_functions={}, precision=17, inline=True, fully_qualified_modules=True", "sequence in a Piecewise object # it will behave the", "'_print_%s' % k, _print_known_const) class SymPyPrinter(PythonCodePrinter): _kf = dict([(k, 'sympy.'", "return '%s(%d)' % (self._module_format('mpmath.mpf'), e) def _print_Float(self, e): # XXX:", "register=True): parts = fqn.split('.') if register and len(parts) > 1:", "loops=' '.join(loops)) def _print_ImaginaryUnit(self, expr): return '1j' def _print_MatrixBase(self, expr):", "_kf = dict(chain( PythonCodePrinter._kf.items(), [(k, 'numpy.' 
+ v) for k,", "'{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else: return super(NumPyPrinter, self)._print_Pow(expr) def _print_arg(self, expr): return", "import CodePrinter _kw_py2and3 = { 'and', 'as', 'assert', 'break', 'class',", "'tanh': 'tanh' } # Not used from ``math``: [copysign isclose", "expr): return \"%s(%s)\" % (self._module_format('numpy.real'), self._print(expr.args[0])) def _print_MatrixBase(self, expr): func", "mpmath which maintains precision for floats \"\"\" printmethod = \"_mpmathcode\"", "return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in expr.args)) def _print_Max(self, expr):", "} _kw_only_py2 = {'exec', 'print'} _kw_only_py3 = {'False', 'nonlocal', 'True'}", "set it to sufficient # precision to match the Floats", "for i, a, b in expr.limits) return '(builtins.sum({function} {loops}))'.format( function=self._print(expr.function),", "'None' # 'None' is actually not in Python 2's keyword.kwlist", "_print_MutableDenseMatrix = \\ _print_ImmutableMatrix = \\ _print_ImmutableDenseMatrix = \\ lambda", "'not' keyword. # If LambdaPrinter didn't define it, we would", "None)') result.append(')'*(2*i - 2)) return ''.join(result) def _print_ITE(self, expr): from", "_kw_only_py2 = {'exec', 'print'} _kw_only_py3 = {'False', 'nonlocal', 'True'} _known_functions", "i.append(r) j.append(c) data.append(v) return \"{name}({data}, ({i}, {j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data,", "= dict(chain( _known_functions.items(), [(k, 'math.' + v) for k, v", "is installed. args = str(tuple(map(int, e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def", "v in _known_constants_scipy_constants.items()} def _print_SparseMatrix(self, expr): i, j, data =", "the case, it may be triggered prematurely. 
return '{0}({1}, {2},", "Piecewise return self._print(expr.rewrite(Piecewise)) def _print_Sum(self, expr): loops = ( 'for", "'pi' } class MpmathPrinter(PythonCodePrinter): \"\"\" Lambda printer for mpmath which", "{2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1])) def _print_log2(self, e): return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'),", "still have to define our # own because StrPrinter doesn't", "# tuples in nopython mode. return '({},)'.format(delimiter.join(self._print(item) for item in", "# multiplication, so we have to make sure it gets", "func is None: func = self._module_format('numpy.array') return \"%s(%s)\" % (func,", "i=self._print(i), a=self._print(a), b=self._print(b)) for i, a, b in expr.limits) return", "(self._module_format('mpmath.mpf'), e) def _print_Float(self, e): # XXX: This does not", "= defaultdict(set) self.known_functions = dict(self._kf, **(settings or {}).get( 'user_functions', {}))", "in SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s' % k, _print_known_func) for k in", "return ('{0} % {1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args))) def _print_Piecewise(self,", "class MpmathPrinter(PythonCodePrinter): \"\"\" Lambda printer for mpmath which maintains precision", "Piecewise object # it will behave the same as passing", "_print_known_const) _known_functions_scipy_special = { 'erf': 'erf', 'erfc': 'erfc', 'gamma': 'gamma',", "is None: func = self._module_format('numpy.array') return \"%s(%s)\" % (func, self._print(expr.tolist()))", "i=i, j=j, shape=expr.shape ) _print_ImmutableSparseMatrix = _print_SparseMatrix for k in", "'<=' :'less_equal', '>' :'greater', '>=' :'greater_equal', } if expr.rel_op in", "_known_functions_scipy_special.items()] )) _kc = {k: 'scipy.constants.' 
+ v for k,", "def _print_Piecewise(self, expr): result = [] i = 0 for", "expr.args))) def _print_known_const(self, expr): known = self.known_constants[expr.__class__.__name__] return self._module_format(known) class", "v) for k, v in _known_functions_mpmath.items()] )) def _print_Integer(self, e):", "= %s\" % (name, value) def _module_format(self, fqn, register=True): parts", "setattr(SciPyPrinter, '_print_%s' % k, _print_known_func) for k in SciPyPrinter._kc: setattr(SciPyPrinter,", "} _known_functions_math = { 'acos': 'acos', 'acosh': 'acosh', 'asin': 'asin',", "'.join(map(self._print, expr.args))) def _print_known_const(self, expr): known = self.known_constants[expr.__class__.__name__] return self._module_format(known)", "'erf', 'erfc': 'erfc', 'gamma': 'gamma', 'loggamma': 'gammaln' } _known_constants_scipy_constants =", "_print_seq(self, seq, delimiter=', '): \"General sequence printer: converts to tuple\"", "i += 1 result = result[:-1] result.append(') else None)') result.append(')'*(2*i", "_print_known_const) _not_in_numpy = 'erf erfc factorial gamma lgamma'.split() _in_numpy =", "'sympy.' + v) for k, v in chain( _known_functions.items(), _known_functions_math.items()", "v in expr._smat.items(): i.append(r) j.append(c) data.append(v) return \"{name}({data}, ({i}, {j}),", "_print_arg(self, expr): return \"%s(%s)\" % (self._module_format('numpy.angle'), self._print(expr.args[0])) def _print_im(self, expr):", "name) return \"%s(%s)\" % (func, self._print(expr.tolist())) _print_SparseMatrix = \\ _print_MutableSparseMatrix", "use StrPrinter's # version of the function and add 'logical_and'", "!= 1: arg2 = arg2.T return \"%s(%s, %s)\" % (self._module_format('numpy.dot'),", "XXX: This does not handle setting mpmath.mp.dps. 
It is assumed", "'yield', 'None' # 'None' is actually not in Python 2's", "\"Piecewise function printer\" exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args))", "expr): loops = ( 'for {i} in range({a}, {b}+1)'.format( i=self._print(i),", "return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in expr.args)) def _print_Not(self, expr):", "gmpy is installed. args = str(tuple(map(int, e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args)", "and len(parts) > 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']: return fqn else:", "'<' :'less', '<=' :'less_equal', '>' :'greater', '>=' :'greater_equal', } if", "conds, exprs) def _print_Relational(self, expr): \"Relational printer for Equality and", "to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in expr.args)) def", ":'equal', '!=' :'not_equal', '<' :'less', '<=' :'less_equal', '>' :'greater', '>='", "lists because numba supports # tuples in nopython mode. 
return", "super(PythonCodePrinter, self).__init__(settings) self.module_imports = defaultdict(set) self.known_functions = dict(self._kf, **(settings or", ")) _kc = {k: 'numpy.'+v for k, v in _known_constants_math.items()}", "lowergamma functioin return \"{0}({1}, 0, {2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1])) def", "def _print_Sum(self, expr): loops = ( 'for {i} in range({a},", "'>' :'greater', '>=' :'greater_equal', } if expr.rel_op in op: lhs", "is a (expr, cond) sequence in a Piecewise object #", "for k, v in _known_functions_math.items()] )) _kc = {k: 'math.'+v", "def _print_lowergamma(self,e): #printer for the lowergamma functioin return \"{0}({1}, 0,", "+ v) for k, v in _known_functions_numpy.items()] )) _kc =", "expr._smat.items(): i.append(r) j.append(c) data.append(v) return \"{name}({data}, ({i}, {j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'),", "k, v in _known_functions_numpy.items()] )) _kc = {k: 'numpy.'+v for", "in _not_in_numpy] _known_functions_numpy = dict(_in_numpy, **{ 'acos': 'arccos', 'acosh': 'arccosh',", "% k, _print_known_func) for k in _known_constants_mpmath: setattr(MpmathPrinter, '_print_%s' %", "# *as long as* it is the last element in", "n x 1. arg1, arg2 = expr.args if arg1.shape[0] !=", "'_print_%s' % k, _print_known_func) for k in _known_constants_mpmath: setattr(MpmathPrinter, '_print_%s'", "arg2 = expr.args if arg1.shape[0] != 1: arg1 = arg1.T", "gets 1 x n by n x 1. 
arg1, arg2", "k in PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func) for k", "k, v in _known_constants_math.items()} def _print_seq(self, seq, delimiter=', '): \"General", "expr.args)) def _print_Min(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in", "'_print_%s' % k, _print_known_const) _not_in_numpy = 'erf erfc factorial gamma", "If [default_value, True] is a (expr, cond) sequence in a", "+ v) for k, v in _known_functions_scipy_special.items()] )) _kc =", "k, v in chain( _known_functions.items(), _known_functions_math.items() )]) def _print_Function(self, expr):", "to match the Floats in the expression. # Remove 'mpz'", "{ 'Abs': 'abs', } _known_functions_math = { 'acos': 'acos', 'acosh':", "of lists because numba supports # tuples in nopython mode.", "the caller of the lambdified function will have set it", "self.module_imports = defaultdict(set) self.known_functions = dict(self._kf, **(settings or {}).get( 'user_functions',", "_known_functions_math.items() if k not in _not_in_mpmath] _known_functions_mpmath = dict(_in_mpmath) _known_constants_mpmath", "PythonCodePrinter(settings).doprint(expr) _not_in_mpmath = 'log1p log2'.split() _in_mpmath = [(k, v) for", "self._print(e.args[0])) for k in MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s' % k, _print_known_func)", "# initialized to a set in __init__ tab = '", "k, v in _known_constants_scipy_constants.items()} def _print_SparseMatrix(self, expr): i, j, data", "return fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self, lines): return lines def _get_comment(self, text):", "v) for k, v in _known_functions_math.items()] )) _kc = {k:", "fmod fsum gcd degrees fabs] _known_constants_math = { 'Exp1': 'e',", "setting mpmath.mp.dps. 
It is assumed that # the caller of", ":'greater', '>=' :'greater_equal', } if expr.rel_op in op: lhs =", "_print_log1p(self, e): return '{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) for k in MpmathPrinter._kf:", "function and add 'logical_and' to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for", "because it uses Python 'or' keyword. # If LambdaPrinter didn't", "# Remove 'mpz' if gmpy is installed. args = str(tuple(map(int,", "LambdaPrinter because it uses Python 'and' keyword. # If LambdaPrinter", "a=self._print(a), b=self._print(b)) for i, a, b in expr.limits) return '(builtins.sum({function}", "Not printer\" # We have to override LambdaPrinter because it", "'tan', 'tanh': 'tanh' } # Not used from ``math``: [copysign", "op = { '==' :'equal', '!=' :'not_equal', '<' :'less', '<='", "We have to override LambdaPrinter because it uses Python 'not'", "return '%s(%s)' % (self._module_format(mod + ('.' if mod else '')", "'return', 'try', 'while', 'with', 'yield', 'None' # 'None' is actually", "else: return super(NumPyPrinter, self)._print_Pow(expr) def _print_arg(self, expr): return \"%s(%s)\" %", "_print_ImmutableSparseMatrix = \\ _print_Matrix = \\ _print_DenseMatrix = \\ _print_MutableDenseMatrix", "text): return \" # {0}\".format(text) def _print_NaN(self, expr): return \"float('nan')\"", "object # it will behave the same as passing the", "e): # XXX: This does not handle setting mpmath.mp.dps. It", "element in expr.args. # If this is not the case,", "chain from sympy.core import sympify from .precedence import precedence from", "value): return \"%s = %s\" % (name, value) def _module_format(self,", "prematurely. 
return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs) def _print_Relational(self, expr):", "dict(_in_mpmath) _known_constants_mpmath = { 'Pi': 'pi' } class MpmathPrinter(PythonCodePrinter): \"\"\"", "in _known_functions_math.items() if k not in _not_in_mpmath] _known_functions_mpmath = dict(_in_mpmath)", "self._print(expr.rewrite(Piecewise)) def _print_Sum(self, expr): loops = ( 'for {i} in", "_known_functions_mpmath.items()] )) def _print_Integer(self, e): return '%s(%d)' % (self._module_format('mpmath.mpf'), e)", "func = self.known_functions.get(name, name) return \"%s(%s)\" % (func, self._print(expr.tolist())) _print_SparseMatrix", "i = 0 for arg in expr.args: e = arg.expr", "return \"{0}({1}, {2}, {3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf')) def _print_lowergamma(self,e):", "add 'logical_and' to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in", "'erf erfc factorial gamma lgamma'.split() _in_numpy = [(k, v) for", "modf # radians trunc fmod fsum gcd degrees fabs] _known_constants_math", "floats \"\"\" printmethod = \"_mpmathcode\" _kf = dict(chain( _known_functions.items(), [(k,", "\"Logical And printer\" # We have to override LambdaPrinter because", "'sinh', 'Sqrt': 'sqrt', 'tan': 'tan', 'tanh': 'tanh' } # Not", "didn't define it, we could use StrPrinter's # version of", "\"%s(%s, %s)\" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2)) def _print_Piecewise(self, expr): \"Piecewise", "3.5: # 'Infinity': 'inf', # 'NaN': 'nan' } def _print_known_func(self,", "printmethod = \"_numpycode\" _kf = dict(chain( PythonCodePrinter._kf.items(), [(k, 'numpy.' 
+", "dict(_in_numpy, **{ 'acos': 'arccos', 'acosh': 'arccosh', 'asin': 'arcsin', 'asinh': 'arcsinh',", "def _get_comment(self, text): return \" # {0}\".format(text) def _print_NaN(self, expr):", "**(settings or {}).get( 'user_functions', {})) self.known_constants = dict(self._kc, **(settings or", "from collections import defaultdict from functools import wraps from itertools", "printer for mpmath which maintains precision for floats \"\"\" printmethod", "= 'log1p log2'.split() _in_mpmath = [(k, v) for k, v", "\"Logical Or printer\" # We have to override LambdaPrinter because", "= str(tuple(map(int, e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def _print_uppergamma(self,e): #printer for", "parts = fqn.split('.') if register and len(parts) > 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1])", "'expm1', 'factorial': 'factorial', 'floor': 'floor', 'gamma': 'gamma', 'hypot': 'hypot', 'loggamma':", "'{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in expr.args)) def _print_Max(self, expr): return", "= [] i = 0 for arg in expr.args: e", "Floats in the expression. 
# Remove 'mpz' if gmpy is", "_print_Min(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in expr.args)) def", "StrPrinter's # version of the function and add 'logical_and' to", "keyword.kwlist } _kw_only_py2 = {'exec', 'print'} _kw_only_py3 = {'False', 'nonlocal',", "\\ _print_MutableSparseMatrix = \\ _print_ImmutableSparseMatrix = \\ _print_Matrix = \\", "numpy.dot does matrix # multiplication, so we have to make", "args=', '.join(map(self._print, expr.args))) def _print_known_const(self, expr): known = self.known_constants[expr.__class__.__name__] return", "def _print_im(self, expr): return \"%s(%s)\" % (self._module_format('numpy.imag', self._print(expr.args[0]))) def _print_Mod(self,", "define it, we would still have to define our #", "_print_re(self, expr): return \"%s(%s)\" % (self._module_format('numpy.real'), self._print(expr.args[0])) def _print_MatrixBase(self, expr):", "{ 'Exp1': 'e', 'Pi': 'pi', # Only in python >=", "_kf = dict(chain( _known_functions.items(), [(k, 'mpmath.' + v) for k,", "'exp2': 'exp2', }) class NumPyPrinter(PythonCodePrinter): \"\"\" Numpy printer which handles", "'.join(map(self._print, expr.args))) def _print_re(self, expr): return \"%s(%s)\" % (self._module_format('numpy.real'), self._print(expr.args[0]))", "'hypot': 'hypot', 'loggamma': 'lgamma', 'log': 'log', 'log10': 'log10', 'log1p': 'log1p',", "expr: self._print_MatrixBase(expr) for k in PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s' % k,", "'_print_%s' % k, _print_known_const) _known_functions_scipy_special = { 'erf': 'erf', 'erfc':", "converts to tuple\" # Print tuples here instead of lists", "= \\ _print_MutableSparseMatrix = \\ _print_ImmutableSparseMatrix = \\ _print_Matrix =", "function=self._print(expr.function), loops=' '.join(loops)) def _print_ImaginaryUnit(self, expr): return '1j' def _print_MatrixBase(self,", "'logical_and' to NUMPY_TRANSLATIONS. 
return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in expr.args))", "to define our # own because StrPrinter doesn't define it.", "'cos': 'cos', 'cosh': 'cosh', 'erf': 'erf', 'erfc': 'erfc', 'exp': 'exp',", "as* it is the last element in expr.args. # If", ") _print_ImmutableSparseMatrix = _print_SparseMatrix for k in SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s'", "{'False', 'nonlocal', 'True'} _known_functions = { 'Abs': 'abs', } _known_functions_math", "PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func) for k in _known_constants_math:", "# Print tuples here instead of lists because numba supports", "({i}, {j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i, j=j, shape=expr.shape ) _print_ImmutableSparseMatrix", "handle setting mpmath.mp.dps. It is assumed that # the caller", "_known_functions = { 'Abs': 'abs', } _known_functions_math = { 'acos':", "else None)') result.append(')'*(2*i - 2)) return ''.join(result) def _print_ITE(self, expr):", "True] is a (expr, cond) sequence in a Piecewise object", "#printer for the lowergamma functioin return \"{0}({1}, 0, {2})\".format( self._module_format('mpmath.gammainc'),", "return '({},)'.format(delimiter.join(self._print(item) for item in seq)) def _print_MatMul(self, expr): \"Matrix", "nopython mode. return '({},)'.format(delimiter.join(self._print(item) for item in seq)) def _print_MatMul(self,", "'tan': 'tan', 'tanh': 'tanh' } # Not used from ``math``:", "If LambdaPrinter didn't define it, we could use StrPrinter's #", "installed. 
args = str(tuple(map(int, e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def _print_uppergamma(self,e):", "user_functions={}, precision=17, inline=True, fully_qualified_modules=True ) def __init__(self, settings=None): super(PythonCodePrinter, self).__init__(settings)", "self)._print_Pow(expr) def _print_arg(self, expr): return \"%s(%s)\" % (self._module_format('numpy.angle'), self._print(expr.args[0])) def", "'acosh', 'asin': 'asin', 'asinh': 'asinh', 'atan': 'atan', 'atan2': 'atan2', 'atanh':", "def _print_MatrixBase(self, expr): name = expr.__class__.__name__ func = self.known_functions.get(name, name)", "else: return fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self, lines): return lines def _get_comment(self,", "(self._module_format('numpy.real'), self._print(expr.args[0])) def _print_MatrixBase(self, expr): func = self.known_functions.get(expr.__class__.__name__, None) if", "= '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args)) conds = '[{0}]'.format(','.join(self._print(arg.cond) for", "\"%s(%s)\" % (self._module_format('numpy.angle'), self._print(expr.args[0])) def _print_im(self, expr): return \"%s(%s)\" %", "_print_Matrix = \\ _print_DenseMatrix = \\ _print_MutableDenseMatrix = \\ _print_ImmutableMatrix", "this is not the case, it may be triggered prematurely.", "% (self._module_format(mod + ('.' if mod else '') + expr.func.__name__),", "data=data, i=i, j=j, shape=expr.shape ) _print_ImmutableSparseMatrix = _print_SparseMatrix for k", "\"_numpycode\" _kf = dict(chain( PythonCodePrinter._kf.items(), [(k, 'numpy.' + v) for", "'tanh' } # Not used from ``math``: [copysign isclose isfinite", "'log2', 'sin': 'sin', 'sinh': 'sinh', 'Sqrt': 'sqrt', 'tan': 'tan', 'tanh':", "in op: lhs = self._print(expr.lhs) rhs = self._print(expr.rhs) return '{op}({lhs},", ")) def _print_Integer(self, e): return '%s(%d)' % (self._module_format('mpmath.mpf'), e) def", "define it. 
return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in expr.args)) def", "def _format_code(self, lines): return lines def _get_comment(self, text): return \"", "Numpy printer which handles vectorized piecewise functions, logical operators, etc.", "assumed that # the caller of the lambdified function will", "= self.known_functions.get(name, name) return \"%s(%s)\" % (func, self._print(expr.tolist())) _print_SparseMatrix =", "because it uses Python 'and' keyword. # If LambdaPrinter didn't", "standard = \"python3\" reserved_words = _kw_py2and3.union(_kw_only_py3) modules = None #", "the lambdified function will have set it to sufficient #", "expr): \"Logical Or printer\" # We have to override LambdaPrinter", "import chain from sympy.core import sympify from .precedence import precedence", "in _known_constants_math: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const) def pycode(expr, **settings):", "known = self.known_constants[expr.__class__.__name__] return self._module_format(known) class PythonCodePrinter(CodePrinter): printmethod = \"_pythoncode\"", "= arg1.T if arg2.shape[1] != 1: arg2 = arg2.T return", "'{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in expr.args)) def _print_Pow(self, expr): if", "= expr.args if arg1.shape[0] != 1: arg1 = arg1.T if", "here instead of lists because numba supports # tuples in", "{k: 'scipy.constants.' 
+ v for k, v in _known_constants_scipy_constants.items()} def", "'acos', 'acosh': 'acosh', 'asin': 'asin', 'asinh': 'asinh', 'atan': 'atan', 'atan2':", "= dict( CodePrinter._default_settings, user_functions={}, precision=17, inline=True, fully_qualified_modules=True ) def __init__(self,", "{j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i, j=j, shape=expr.shape ) _print_ImmutableSparseMatrix =", "= {k: 'math.'+v for k, v in _known_constants_math.items()} _operators =", "(self._module_format(mod + ('.' if mod else '') + expr.func.__name__), ',", "_module_format(self, fqn, register=True): parts = fqn.split('.') if register and len(parts)", "v in _known_functions_math.items() if k not in _not_in_mpmath] _known_functions_mpmath =", "printer which handles vectorized piecewise functions, logical operators, etc. \"\"\"", "value) def _module_format(self, fqn, register=True): parts = fqn.split('.') if register", "v) for k, v in _known_functions_math.items() if k not in", "{2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs) def _print_Relational(self, expr): \"Relational printer for", "self._print(arg2)) def _print_Piecewise(self, expr): \"Piecewise function printer\" exprs = '[{0}]'.format(','.join(self._print(arg.expr)", "_in_numpy = [(k, v) for k, v in _known_functions_math.items() if", "cond) sequence in a Piecewise object # it will behave", "if gmpy is installed. args = str(tuple(map(int, e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'),", "e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def _print_uppergamma(self,e): #printer for the uppergamma", "PREC = precedence(expr) return ('{0} % {1}'.format(*map(lambda x: self.parenthesize(x, PREC),", "_kf = dict([(k, 'sympy.' 
+ v) for k, v in", "'{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs) def _print_Relational(self, expr): \"Relational printer", "'arcsinh', 'atan': 'arctan', 'atan2': 'arctan2', 'atanh': 'arctanh', 'exp2': 'exp2', })", "return \"%s(%s)\" % (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args))) def _print_re(self, expr):", "= \\ _print_Matrix = \\ _print_DenseMatrix = \\ _print_MutableDenseMatrix =", "= { '==' :'equal', '!=' :'not_equal', '<' :'less', '<=' :'less_equal',", "_kc = {k: 'scipy.constants.' + v for k, v in", "_kf = dict(chain( NumPyPrinter._kf.items(), [(k, 'scipy.special.' + v) for k,", "NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s' % k, _print_known_func) for k in NumPyPrinter._kc:", "= { 'acos': 'acos', 'acosh': 'acosh', 'asin': 'asin', 'asinh': 'asinh',", "_print_DotProduct(self, expr): # DotProduct allows any shape order, but numpy.dot", "def _print_Mod(self, expr): PREC = precedence(expr) return ('{0} % {1}'.format(*map(lambda", "'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'raise', 'return', 'try',", "self._module_format('mpmath.log'), self._print(e.args[0])) for k in MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s' % k,", "> 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']: return fqn else: return fqn.split('(')[0].split('[')[0].split('.')[-1]", "= \"_numpycode\" _kf = dict(chain( PythonCodePrinter._kf.items(), [(k, 'numpy.' 
+ v)", "(') result.append(self._print(c)) result.append(') else (') i += 1 result =", "self)._print_Relational(expr) def _print_And(self, expr): \"Logical And printer\" # We have", "_known_constants_mpmath: setattr(MpmathPrinter, '_print_%s' % k, _print_known_const) _not_in_numpy = 'erf erfc", "'erfc': 'erfc', 'exp': 'exp', 'expm1': 'expm1', 'factorial': 'factorial', 'floor': 'floor',", "expr): # DotProduct allows any shape order, but numpy.dot does", "self.known_functions = dict(self._kf, **(settings or {}).get( 'user_functions', {})) self.known_constants =", "if self._settings['fully_qualified_modules']: return fqn else: return fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self, lines):", "StrPrinter's # version of the function and add 'logical_or' to", "dict(chain( _known_functions.items(), [(k, 'math.' + v) for k, v in", "NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in expr.args)) def _print_Or(self,", "function printer\" exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args)) conds", "isfinite isinf isnan ldexp frexp pow modf # radians trunc", "\"python3\" reserved_words = _kw_py2and3.union(_kw_only_py3) modules = None # initialized to", "v) for k, v in _known_functions_scipy_special.items()] )) _kc = {k:", "} class SciPyPrinter(NumPyPrinter): _kf = dict(chain( NumPyPrinter._kf.items(), [(k, 'scipy.special.' 
+", "self._settings['fully_qualified_modules']: return fqn else: return fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self, lines): return", "_known_constants_math = { 'Exp1': 'e', 'Pi': 'pi', # Only in", "in chain( _known_functions.items(), _known_functions_math.items() )]) def _print_Function(self, expr): mod =", "self, expr: self._print_MatrixBase(expr) for k in PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s' %", "expr): known = self.known_constants[expr.__class__.__name__] return self._module_format(known) class PythonCodePrinter(CodePrinter): printmethod =", "'erf', 'erfc': 'erfc', 'exp': 'exp', 'expm1': 'expm1', 'factorial': 'factorial', 'floor':", "def _print_Infinity(self, expr): return \"float('inf')\" def _print_Mod(self, expr): PREC =", "dict(self._kf, **(settings or {}).get( 'user_functions', {})) self.known_constants = dict(self._kc, **(settings", "Python 2's keyword.kwlist } _kw_only_py2 = {'exec', 'print'} _kw_only_py3 =", "didn't define it, we would still have to define our", "have to override LambdaPrinter because it uses Python 'and' keyword.", "if expr.exp == 0.5: return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else: return super(NumPyPrinter,", "for arg in expr.args)) # If [default_value, True] is a", "is the last element in expr.args. # If this is", "(func, self._print(expr.tolist())) for k in NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s' % k,", "Python 'or' keyword. # If LambdaPrinter didn't define it, we", "{3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf')) def _print_lowergamma(self,e): #printer for the", "# it will behave the same as passing the 'default'", "version of the function and add 'logical_and' to NUMPY_TRANSLATIONS. 
return", "% (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args))) def _print_re(self, expr): return \"%s(%s)\"", "'exp', 'expm1': 'expm1', 'factorial': 'factorial', 'floor': 'floor', 'gamma': 'gamma', 'hypot':", "for arg in expr.args: e = arg.expr c = arg.cond", "self.known_functions.get(expr.__class__.__name__, None) if func is None: func = self._module_format('numpy.array') return", "'' return '%s(%s)' % (self._module_format(mod + ('.' if mod else", "+ v) for k, v in chain( _known_functions.items(), _known_functions_math.items() )])", "expr): \"Logical And printer\" # We have to override LambdaPrinter", "functioin return \"{0}({1}, 0, {2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1])) def _print_log2(self,", "def _print_Min(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in expr.args))", "SciPyPrinter(NumPyPrinter): _kf = dict(chain( NumPyPrinter._kf.items(), [(k, 'scipy.special.' + v) for", "have to define our # own because StrPrinter doesn't define", "in NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s' % k, _print_known_const) _known_functions_scipy_special = {", "if register and len(parts) > 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']: return", "of the function and add 'logical_or' to NUMPY_TRANSLATIONS. 
return '{0}({1})'.format(self._module_format('numpy.logical_or'),", "lines def _get_comment(self, text): return \" # {0}\".format(text) def _print_NaN(self,", "return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in expr.args)) def _print_Or(self, expr):", "','.join(self._print(i) for i in expr.args)) def _print_Not(self, expr): \"Logical Not", "def _print_Function(self, expr): mod = expr.func.__module__ or '' return '%s(%s)'", "'({0})'.format(').dot('.join(self._print(i) for i in expr.args)) def _print_DotProduct(self, expr): # DotProduct", "v in _known_constants_math.items()} _operators = {'and': 'and', 'or': 'or', 'not':", "it uses Python 'not' keyword. # If LambdaPrinter didn't define", "v in chain( _known_functions.items(), _known_functions_math.items() )]) def _print_Function(self, expr): mod", "'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except',", "in python >= 3.5: # 'Infinity': 'inf', # 'NaN': 'nan'", "to override LambdaPrinter because it uses Python 'not' keyword. #", "lines): return lines def _get_comment(self, text): return \" # {0}\".format(text)", "the lowergamma functioin return \"{0}({1}, 0, {2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]))", "'and' keyword. # If LambdaPrinter didn't define it, we could", "'erfc', 'exp': 'exp', 'expm1': 'expm1', 'factorial': 'factorial', 'floor': 'floor', 'gamma':", "to override LambdaPrinter because it uses Python 'or' keyword. #", "_print_Max(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in expr.args)) def", "Unequality\" op = { '==' :'equal', '!=' :'not_equal', '<' :'less',", "in PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func) for k in", "long as* it is the last element in expr.args. 
#", "in expr.args)) def _print_Not(self, expr): \"Logical Not printer\" # We", "return self._module_format(known) class PythonCodePrinter(CodePrinter): printmethod = \"_pythoncode\" language = \"Python\"", "} # Not used from ``math``: [copysign isclose isfinite isinf", "in expr.args)) def _print_Pow(self, expr): if expr.exp == 0.5: return", "% k, _print_known_const) def pycode(expr, **settings): return PythonCodePrinter(settings).doprint(expr) _not_in_mpmath =", "{rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs) return super(NumPyPrinter, self)._print_Relational(expr) def _print_And(self, expr): \"Logical", "log2'.split() _in_mpmath = [(k, v) for k, v in _known_functions_math.items()", "%s\" % (name, value) def _module_format(self, fqn, register=True): parts =", "_get_comment(self, text): return \" # {0}\".format(text) def _print_NaN(self, expr): return", "for the lowergamma functioin return \"{0}({1}, 0, {2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]),", "'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or',", "_kc = {k: 'math.'+v for k, v in _known_constants_math.items()} _operators", "import precedence from .codeprinter import CodePrinter _kw_py2and3 = { 'and',", "# Only in python >= 3.5: # 'Infinity': 'inf', #", "mode. return '({},)'.format(delimiter.join(self._print(item) for item in seq)) def _print_MatMul(self, expr):", "item in seq)) def _print_MatMul(self, expr): \"Matrix multiplication printer\" return", "of the lambdified function will have set it to sufficient", ":'not_equal', '<' :'less', '<=' :'less_equal', '>' :'greater', '>=' :'greater_equal', }", "gcd degrees fabs] _known_constants_math = { 'Exp1': 'e', 'Pi': 'pi',", "Only in python >= 3.5: # 'Infinity': 'inf', # 'NaN':", "'mpmath.' 
+ v) for k, v in _known_functions_mpmath.items()] )) def", "\"%s(%s)\" % (func, self._print(expr.tolist())) for k in NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s'", "k not in _not_in_numpy] _known_functions_numpy = dict(_in_numpy, **{ 'acos': 'arccos',", "'erf': 'erf', 'erfc': 'erfc', 'exp': 'exp', 'expm1': 'expm1', 'factorial': 'factorial',", "a (expr, cond) sequence in a Piecewise object # it", "for i in expr.args)) def _print_Max(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i)", "= { 'erf': 'erf', 'erfc': 'erfc', 'gamma': 'gamma', 'loggamma': 'gammaln'", "# If LambdaPrinter didn't define it, we could use StrPrinter's", "Python 'not' keyword. # If LambdaPrinter didn't define it, we", "expr.args: e = arg.expr c = arg.cond result.append('((') result.append(self._print(e)) result.append(')", "not in _not_in_numpy] _known_functions_numpy = dict(_in_numpy, **{ 'acos': 'arccos', 'acosh':", "# If this is not the case, it may be", "'asin': 'asin', 'asinh': 'asinh', 'atan': 'atan', 'atan2': 'atan2', 'atanh': 'atanh',", "arg in expr.args: e = arg.expr c = arg.cond result.append('((')", "+ v) for k, v in _known_functions_mpmath.items()] )) def _print_Integer(self,", "_print_uppergamma(self,e): #printer for the uppergamma function return \"{0}({1}, {2}, {3})\".format(", "add 'logical_or' to NUMPY_TRANSLATIONS. 
return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in", "result[:-1] result.append(') else None)') result.append(')'*(2*i - 2)) return ''.join(result) def", "sufficient # precision to match the Floats in the expression.", "_print_ImmutableDenseMatrix = \\ lambda self, expr: self._print_MatrixBase(expr) for k in", "self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf')) def _print_lowergamma(self,e): #printer for the lowergamma", "'log1p': 'log1p', 'log2': 'log2', 'sin': 'sin', 'sinh': 'sinh', 'Sqrt': 'sqrt',", "= [(k, v) for k, v in _known_functions_math.items() if k", "k in MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s' % k, _print_known_func) for k", "reserved_words = _kw_py2and3.union(_kw_only_py3) modules = None # initialized to a", "'exp': 'exp', 'expm1': 'expm1', 'factorial': 'factorial', 'floor': 'floor', 'gamma': 'gamma',", "_print_SparseMatrix(self, expr): i, j, data = [], [], [] for", "= {'and': 'and', 'or': 'or', 'not': 'not'} _default_settings = dict(", "return \" # {0}\".format(text) def _print_NaN(self, expr): return \"float('nan')\" def", "' _kf = dict(chain( _known_functions.items(), [(k, 'math.' + v) for", "allows any shape order, but numpy.dot does matrix # multiplication,", "_print_im(self, expr): return \"%s(%s)\" % (self._module_format('numpy.imag', self._print(expr.args[0]))) def _print_Mod(self, expr):", "DotProduct allows any shape order, but numpy.dot does matrix #", "c), v in expr._smat.items(): i.append(r) j.append(c) data.append(v) return \"{name}({data}, ({i},", "[(k, 'math.' + v) for k, v in _known_functions_math.items()] ))", "_print_known_const) class SymPyPrinter(PythonCodePrinter): _kf = dict([(k, 'sympy.' 
+ v) for", "%s)\" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2)) def _print_Piecewise(self, expr): \"Piecewise function", "expr): if expr.exp == 0.5: return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else: return", "'ceil', 'cos': 'cos', 'cosh': 'cosh', 'erf': 'erf', 'erfc': 'erfc', 'exp':", "def _print_ImaginaryUnit(self, expr): return '1j' def _print_MatrixBase(self, expr): name =", "'math.'+v for k, v in _known_constants_math.items()} _operators = {'and': 'and',", "it. return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in expr.args)) def _print_Min(self,", "dict([(k, 'sympy.' + v) for k, v in chain( _known_functions.items(),", "expr.__class__.__name__ func = self.known_functions.get(name, name) return \"%s(%s)\" % (func, self._print(expr.tolist()))", "0.5: return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else: return super(NumPyPrinter, self)._print_Pow(expr) def _print_arg(self,", "'def', 'del', 'elif', 'else', 'except', 'finally', 'for', 'from', 'global', 'if',", "= self.known_constants[expr.__class__.__name__] return self._module_format(known) class PythonCodePrinter(CodePrinter): printmethod = \"_pythoncode\" language", "= \"_pythoncode\" language = \"Python\" standard = \"python3\" reserved_words =", "lhs=lhs, rhs=rhs) return super(NumPyPrinter, self)._print_Relational(expr) def _print_And(self, expr): \"Logical And", "'loggamma': 'gammaln' } _known_constants_scipy_constants = { 'GoldenRatio': 'golden_ratio' } class", "chain( _known_functions.items(), _known_functions_math.items() )]) def _print_Function(self, expr): mod = expr.func.__module__", "\"float('nan')\" def _print_Infinity(self, expr): return \"float('inf')\" def _print_Mod(self, expr): PREC", "import Piecewise return self._print(expr.rewrite(Piecewise)) def _print_Sum(self, expr): loops = (", "'while', 'with', 'yield', 'None' # 'None' is actually not in", 
"_known_constants_scipy_constants.items()} def _print_SparseMatrix(self, expr): i, j, data = [], [],", "_print_Float(self, e): # XXX: This does not handle setting mpmath.mp.dps.", "'atanh': 'atanh', 'ceiling': 'ceil', 'cos': 'cos', 'cosh': 'cosh', 'erf': 'erf',", "_known_constants_scipy_constants = { 'GoldenRatio': 'golden_ratio' } class SciPyPrinter(NumPyPrinter): _kf =", "precision=17, inline=True, fully_qualified_modules=True ) def __init__(self, settings=None): super(PythonCodePrinter, self).__init__(settings) self.module_imports", "x n by n x 1. arg1, arg2 = expr.args", "1: arg1 = arg1.T if arg2.shape[1] != 1: arg2 =", "'{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in expr.args)) def _print_Min(self, expr): return", "'sin', 'sinh': 'sinh', 'Sqrt': 'sqrt', 'tan': 'tan', 'tanh': 'tanh' }", "= self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print, expr.args))) def _print_known_const(self, expr):", "= arg.cond result.append('((') result.append(self._print(e)) result.append(') if (') result.append(self._print(c)) result.append(') else", "lgamma'.split() _in_numpy = [(k, v) for k, v in _known_functions_math.items()", "uses Python 'not' keyword. # If LambdaPrinter didn't define it,", "'[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args)) # If [default_value, True] is", "'arccos', 'acosh': 'arccosh', 'asin': 'arcsin', 'asinh': 'arcsinh', 'atan': 'arctan', 'atan2':", "'with', 'yield', 'None' # 'None' is actually not in Python", "_known_functions_math = { 'acos': 'acos', 'acosh': 'acosh', 'asin': 'asin', 'asinh':", "the function and add 'logical_or' to NUMPY_TRANSLATIONS. 
return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i)", "if expr.rel_op in op: lhs = self._print(expr.lhs) rhs = self._print(expr.rhs)", "'cosh', 'erf': 'erf', 'erfc': 'erfc', 'exp': 'exp', 'expm1': 'expm1', 'factorial':", "expr.args)) # If [default_value, True] is a (expr, cond) sequence", "same as passing the 'default' kwarg to select() # *as", "return \"%s(%s, %s)\" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2)) def _print_Piecewise(self, expr):", "_operators = {'and': 'and', 'or': 'or', 'not': 'not'} _default_settings =", "e = arg.expr c = arg.cond result.append('((') result.append(self._print(e)) result.append(') if", "'sinh': 'sinh', 'Sqrt': 'sqrt', 'tan': 'tan', 'tanh': 'tanh' } #", "we would still have to define our # own because", "_kw_py2and3 = { 'and', 'as', 'assert', 'break', 'class', 'continue', 'def',", "return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs) return super(NumPyPrinter, self)._print_Relational(expr) def _print_And(self,", "'math.' + v) for k, v in _known_functions_math.items()] )) _kc", "self._module_format('numpy.array') return \"%s(%s)\" % (func, self._print(expr.tolist())) for k in NumPyPrinter._kf:", "setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func) for k in _known_constants_math: setattr(PythonCodePrinter,", "may be triggered prematurely. return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs)", "_print_ImmutableSparseMatrix = _print_SparseMatrix for k in SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s' %", "expr): PREC = precedence(expr) return ('{0} % {1}'.format(*map(lambda x: self.parenthesize(x,", "return \"{name}({data}, ({i}, {j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i, j=j, shape=expr.shape", "' ' _kf = dict(chain( _known_functions.items(), [(k, 'math.' 
+ v)", "lhs = self._print(expr.lhs) rhs = self._print(expr.rhs) return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs,", "in NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s' % k, _print_known_func) for k in", "def _print_MatrixBase(self, expr): func = self.known_functions.get(expr.__class__.__name__, None) if func is", "self._print(e.args[0]), self._print(e.args[1])) def _print_log2(self, e): return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) def", "= self._print(expr.rhs) return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs) return super(NumPyPrinter, self)._print_Relational(expr)", "'Abs': 'abs', } _known_functions_math = { 'acos': 'acos', 'acosh': 'acosh',", "k, _print_known_func) for k in _known_constants_math: setattr(PythonCodePrinter, '_print_%s' % k,", "= \\ _print_DenseMatrix = \\ _print_MutableDenseMatrix = \\ _print_ImmutableMatrix =", "fsum gcd degrees fabs] _known_constants_math = { 'Exp1': 'e', 'Pi':", "\"\"\" printmethod = \"_numpycode\" _kf = dict(chain( PythonCodePrinter._kf.items(), [(k, 'numpy.'", "'try', 'while', 'with', 'yield', 'None' # 'None' is actually not", "arg in expr.args)) conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args))", "arg.cond result.append('((') result.append(self._print(e)) result.append(') if (') result.append(self._print(c)) result.append(') else (')", "will have set it to sufficient # precision to match", "self.known_constants = dict(self._kc, **(settings or {}).get( 'user_constants', {})) def _declare_number_const(self,", "expr): \"Piecewise function printer\" exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in", "'_print_%s' % k, _print_known_func) for k in NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s'", "'asin', 'asinh': 'asinh', 'atan': 'atan', 'atan2': 'atan2', 'atanh': 'atanh', 'ceiling':", "**settings): return 
PythonCodePrinter(settings).doprint(expr) _not_in_mpmath = 'log1p log2'.split() _in_mpmath = [(k,", "'acosh': 'arccosh', 'asin': 'arcsin', 'asinh': 'arcsinh', 'atan': 'arctan', 'atan2': 'arctan2',", "fqn else: return fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self, lines): return lines def", "__init__ tab = ' ' _kf = dict(chain( _known_functions.items(), [(k,", "def _print_re(self, expr): return \"%s(%s)\" % (self._module_format('numpy.real'), self._print(expr.args[0])) def _print_MatrixBase(self,", "_print_known_func) for k in _known_constants_math: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const)", "defaultdict(set) self.known_functions = dict(self._kf, **(settings or {}).get( 'user_functions', {})) self.known_constants", "','.join(self._print(i) for i in expr.args)) def _print_Min(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'),", "def _print_And(self, expr): \"Logical And printer\" # We have to", ")]) def _print_Function(self, expr): mod = expr.func.__module__ or '' return", "for k in NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s' % k, _print_known_const) _known_functions_scipy_special", "\"%s(%s)\" % (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args))) def _print_re(self, expr): return", "_known_functions_numpy = dict(_in_numpy, **{ 'acos': 'arccos', 'acosh': 'arccosh', 'asin': 'arcsin',", "'ceiling': 'ceil', 'cos': 'cos', 'cosh': 'cosh', 'erf': 'erf', 'erfc': 'erfc',", "expr.args. 
# If this is not the case, it may", "arg1.shape[0] != 1: arg1 = arg1.T if arg2.shape[1] != 1:", "v in _known_functions_mpmath.items()] )) def _print_Integer(self, e): return '%s(%d)' %", "_print_NaN(self, expr): return \"float('nan')\" def _print_Infinity(self, expr): return \"float('inf')\" def", "'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else',", "(expr, cond) sequence in a Piecewise object # it will", "order, but numpy.dot does matrix # multiplication, so we have", "printmethod = \"_pythoncode\" language = \"Python\" standard = \"python3\" reserved_words", "SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s' % k, _print_known_func) for k in SciPyPrinter._kc:", "self._print(expr.base)) else: return super(NumPyPrinter, self)._print_Pow(expr) def _print_arg(self, expr): return \"%s(%s)\"", "self._module_format('mpmath.log'), self._print(e.args[0])) def _print_log1p(self, e): return '{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) for", "for i in expr.args)) def _print_Min(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i)", "0 for arg in expr.args: e = arg.expr c =", "k in _known_constants_math: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const) def pycode(expr,", "of the function and add 'logical_and' to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_and'),", "= \"_mpmathcode\" _kf = dict(chain( _known_functions.items(), [(k, 'mpmath.' 
+ v)", "expr.limits) return '(builtins.sum({function} {loops}))'.format( function=self._print(expr.function), loops=' '.join(loops)) def _print_ImaginaryUnit(self, expr):", "'atan2', 'atanh': 'atanh', 'ceiling': 'ceil', 'cos': 'cos', 'cosh': 'cosh', 'erf':", "fully_qualified_modules=True ) def __init__(self, settings=None): super(PythonCodePrinter, self).__init__(settings) self.module_imports = defaultdict(set)", "multiplication, so we have to make sure it gets 1", "_print_Piecewise(self, expr): \"Piecewise function printer\" exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg", "in expr.args)) conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args)) #", "for Equality and Unequality\" op = { '==' :'equal', '!='", "'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'raise',", "% (name, value) def _module_format(self, fqn, register=True): parts = fqn.split('.')", "class SymPyPrinter(PythonCodePrinter): _kf = dict([(k, 'sympy.' + v) for k,", "define it, we could use StrPrinter's # version of the", "= 'erf erfc factorial gamma lgamma'.split() _in_numpy = [(k, v)", "We have to override LambdaPrinter because it uses Python 'and'", "'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'not',", "# XXX: This does not handle setting mpmath.mp.dps. 
It is", "len(parts) > 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']: return fqn else: return", "for k, v in chain( _known_functions.items(), _known_functions_math.items() )]) def _print_Function(self,", "actually not in Python 2's keyword.kwlist } _kw_only_py2 = {'exec',", "self._print(e.args[1]), self._module_format('mpmath.inf')) def _print_lowergamma(self,e): #printer for the lowergamma functioin return", "k, v in _known_functions_mpmath.items()] )) def _print_Integer(self, e): return '%s(%d)'", "'e', 'Pi': 'pi', # Only in python >= 3.5: #", "for i in expr.args)) def _print_Not(self, expr): \"Logical Not printer\"", "it is the last element in expr.args. # If this", "= { 'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del',", "_print_Infinity(self, expr): return \"float('inf')\" def _print_Mod(self, expr): PREC = precedence(expr)", "loops = ( 'for {i} in range({a}, {b}+1)'.format( i=self._print(i), a=self._print(a),", "self._print(expr.tolist())) _print_SparseMatrix = \\ _print_MutableSparseMatrix = \\ _print_ImmutableSparseMatrix = \\", "instead of lists because numba supports # tuples in nopython", "\"{0}({1}, {2}, {3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf')) def _print_lowergamma(self,e): #printer", "'log10', 'log1p': 'log1p', 'log2': 'log2', 'sin': 'sin', 'sinh': 'sinh', 'Sqrt':", "because numba supports # tuples in nopython mode. 
return '({},)'.format(delimiter.join(self._print(item)", "'acos': 'acos', 'acosh': 'acosh', 'asin': 'asin', 'asinh': 'asinh', 'atan': 'atan',", "'{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) def _print_log1p(self, e): return '{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0]))", "not in _not_in_mpmath] _known_functions_mpmath = dict(_in_mpmath) _known_constants_mpmath = { 'Pi':", ")) _kc = {k: 'scipy.constants.' + v for k, v", "}) class NumPyPrinter(PythonCodePrinter): \"\"\" Numpy printer which handles vectorized piecewise", "class SciPyPrinter(NumPyPrinter): _kf = dict(chain( NumPyPrinter._kf.items(), [(k, 'scipy.special.' + v)", "{ 'acos': 'acos', 'acosh': 'acosh', 'asin': 'asin', 'asinh': 'asinh', 'atan':", "v) for k, v in _known_functions_numpy.items()] )) _kc = {k:", "_not_in_numpy = 'erf erfc factorial gamma lgamma'.split() _in_numpy = [(k,", "printer for Equality and Unequality\" op = { '==' :'equal',", "_print_SparseMatrix = \\ _print_MutableSparseMatrix = \\ _print_ImmutableSparseMatrix = \\ _print_Matrix", "'atanh', 'ceiling': 'ceil', 'cos': 'cos', 'cosh': 'cosh', 'erf': 'erf', 'erfc':", "return \"float('inf')\" def _print_Mod(self, expr): PREC = precedence(expr) return ('{0}", "inline=True, fully_qualified_modules=True ) def __init__(self, settings=None): super(PythonCodePrinter, self).__init__(settings) self.module_imports =", "j.append(c) data.append(v) return \"{name}({data}, ({i}, {j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i,", "'user_functions', {})) self.known_constants = dict(self._kc, **(settings or {}).get( 'user_constants', {}))", "have to override LambdaPrinter because it uses Python 'not' keyword.", "( 'for {i} in range({a}, {b}+1)'.format( i=self._print(i), a=self._print(a), b=self._print(b)) for", "functools import wraps from itertools import chain from sympy.core import", "from .codeprinter import CodePrinter _kw_py2and3 
= { 'and', 'as', 'assert',", "override LambdaPrinter because it uses Python 'and' keyword. # If", "_kw_py2and3.union(_kw_only_py3) modules = None # initialized to a set in", "= fqn.split('.') if register and len(parts) > 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if", "we could use StrPrinter's # version of the function and", "'log10': 'log10', 'log1p': 'log1p', 'log2': 'log2', 'sin': 'sin', 'sinh': 'sinh',", "% (func, self._print(expr.tolist())) for k in NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s' %", "def _print_Mod(self, expr): return \"%s(%s)\" % (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args)))", "erfc factorial gamma lgamma'.split() _in_numpy = [(k, v) for k,", "_known_functions.items(), _known_functions_math.items() )]) def _print_Function(self, expr): mod = expr.func.__module__ or", "because it uses Python 'not' keyword. # If LambdaPrinter didn't", ":'less', '<=' :'less_equal', '>' :'greater', '>=' :'greater_equal', } if expr.rel_op", "'arctanh', 'exp2': 'exp2', }) class NumPyPrinter(PythonCodePrinter): \"\"\" Numpy printer which", "_known_functions_math.items() )]) def _print_Function(self, expr): mod = expr.func.__module__ or ''", "_in_mpmath = [(k, v) for k, v in _known_functions_math.items() if", ") def __init__(self, settings=None): super(PythonCodePrinter, self).__init__(settings) self.module_imports = defaultdict(set) self.known_functions", "{loops}))'.format( function=self._print(expr.function), loops=' '.join(loops)) def _print_ImaginaryUnit(self, expr): return '1j' def", "[(k, 'scipy.special.' + v) for k, v in _known_functions_scipy_special.items()] ))", "dict(chain( PythonCodePrinter._kf.items(), [(k, 'numpy.' 
+ v) for k, v in", "tuple\" # Print tuples here instead of lists because numba", "'Pi': 'pi' } class MpmathPrinter(PythonCodePrinter): \"\"\" Lambda printer for mpmath", "expr): name = expr.__class__.__name__ func = self.known_functions.get(name, name) return \"%s(%s)\"", "'elif', 'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in',", "'_print_%s' % k, _print_known_const) def pycode(expr, **settings): return PythonCodePrinter(settings).doprint(expr) _not_in_mpmath", "have set it to sufficient # precision to match the", "printer: converts to tuple\" # Print tuples here instead of", "Remove 'mpz' if gmpy is installed. args = str(tuple(map(int, e._mpf_)))", "2's keyword.kwlist } _kw_only_py2 = {'exec', 'print'} _kw_only_py3 = {'False',", "'scipy.special.' + v) for k, v in _known_functions_scipy_special.items()] )) _kc", "expr.rel_op in op: lhs = self._print(expr.lhs) rhs = self._print(expr.rhs) return", "'if', 'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'raise', 'return',", "precedence(expr) return ('{0} % {1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args))) def", "override LambdaPrinter because it uses Python 'or' keyword. # If", "= arg2.T return \"%s(%s, %s)\" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2)) def", "_known_functions.items(), [(k, 'mpmath.' + v) for k, v in _known_functions_mpmath.items()]", "CodePrinter._default_settings, user_functions={}, precision=17, inline=True, fully_qualified_modules=True ) def __init__(self, settings=None): super(PythonCodePrinter,", "i in expr.args)) def _print_DotProduct(self, expr): # DotProduct allows any", "define our # own because StrPrinter doesn't define it. return", "doesn't define it. 
return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in expr.args))", "\\ _print_ImmutableSparseMatrix = \\ _print_Matrix = \\ _print_DenseMatrix = \\", "caller of the lambdified function will have set it to", "k in SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s' % k, _print_known_func) for k", "self._print(expr.tolist())) for k in NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s' % k, _print_known_func)", "which handles vectorized piecewise functions, logical operators, etc. \"\"\" printmethod", "= 0 for arg in expr.args: e = arg.expr c", "functions, logical operators, etc. \"\"\" printmethod = \"_numpycode\" _kf =", "_print_Integer(self, e): return '%s(%d)' % (self._module_format('mpmath.mpf'), e) def _print_Float(self, e):", "''.join(result) def _print_ITE(self, expr): from sympy.functions.elementary.piecewise import Piecewise return self._print(expr.rewrite(Piecewise))", "k, _print_known_const) def pycode(expr, **settings): return PythonCodePrinter(settings).doprint(expr) _not_in_mpmath = 'log1p", "the same as passing the 'default' kwarg to select() #", "'scipy.constants.' + v for k, v in _known_constants_scipy_constants.items()} def _print_SparseMatrix(self,", "[(k, 'numpy.' + v) for k, v in _known_functions_numpy.items()] ))", "'nonlocal', 'True'} _known_functions = { 'Abs': 'abs', } _known_functions_math =", "_kc = {k: 'numpy.'+v for k, v in _known_constants_math.items()} def", "# If [default_value, True] is a (expr, cond) sequence in", "\"%s(%s)\" % (self._module_format('numpy.imag', self._print(expr.args[0]))) def _print_Mod(self, expr): return \"%s(%s)\" %", "for k in _known_constants_mpmath: setattr(MpmathPrinter, '_print_%s' % k, _print_known_const) _not_in_numpy", "k in NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s' % k, _print_known_const) _known_functions_scipy_special =", "set in __init__ tab = ' ' _kf = dict(chain(", "to NUMPY_TRANSLATIONS. 
return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in expr.args)) def", "= \"python3\" reserved_words = _kw_py2and3.union(_kw_only_py3) modules = None # initialized", "Not used from ``math``: [copysign isclose isfinite isinf isnan ldexp", "for i in expr.args)) def _print_Or(self, expr): \"Logical Or printer\"", "function and add 'logical_or' to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for", "in _not_in_mpmath] _known_functions_mpmath = dict(_in_mpmath) _known_constants_mpmath = { 'Pi': 'pi'", ".precedence import precedence from .codeprinter import CodePrinter _kw_py2and3 = {", "1 result = result[:-1] result.append(') else None)') result.append(')'*(2*i - 2))", "for k in _known_constants_math: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const) def", "triggered prematurely. return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs) def _print_Relational(self,", "= dict(chain( _known_functions.items(), [(k, 'mpmath.' + v) for k, v", "expr.args))) def _print_re(self, expr): return \"%s(%s)\" % (self._module_format('numpy.real'), self._print(expr.args[0])) def", "\"Matrix multiplication printer\" return '({0})'.format(').dot('.join(self._print(i) for i in expr.args)) def", "# If LambdaPrinter didn't define it, we would still have", "_known_functions_mpmath = dict(_in_mpmath) _known_constants_mpmath = { 'Pi': 'pi' } class", "uses Python 'and' keyword. # If LambdaPrinter didn't define it,", "and add 'logical_and' to NUMPY_TRANSLATIONS. 
return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i", "'abs', } _known_functions_math = { 'acos': 'acos', 'acosh': 'acosh', 'asin':", "in expr.args)) def _print_Min(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i", "','.join(self._print(i) for i in expr.args)) def _print_Max(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'),", "'{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs) return super(NumPyPrinter, self)._print_Relational(expr) def _print_And(self, expr):", "a Piecewise object # it will behave the same as", "it uses Python 'or' keyword. # If LambdaPrinter didn't define", "= ( 'for {i} in range({a}, {b}+1)'.format( i=self._print(i), a=self._print(a), b=self._print(b))", "in __init__ tab = ' ' _kf = dict(chain( _known_functions.items(),", "We have to override LambdaPrinter because it uses Python 'or'", "= \"Python\" standard = \"python3\" reserved_words = _kw_py2and3.union(_kw_only_py3) modules =", "could use StrPrinter's # version of the function and add", "expr): from sympy.functions.elementary.piecewise import Piecewise return self._print(expr.rewrite(Piecewise)) def _print_Sum(self, expr):", "'not'} _default_settings = dict( CodePrinter._default_settings, user_functions={}, precision=17, inline=True, fully_qualified_modules=True )", "in expr.limits) return '(builtins.sum({function} {loops}))'.format( function=self._print(expr.function), loops=' '.join(loops)) def _print_ImaginaryUnit(self,", "name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i, j=j, shape=expr.shape ) _print_ImmutableSparseMatrix = _print_SparseMatrix for", "{2}, {3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf')) def _print_lowergamma(self,e): #printer for", "{ 'Pi': 'pi' } class MpmathPrinter(PythonCodePrinter): \"\"\" 
Lambda printer for", "','.join(self._print(i) for i in expr.args)) def _print_Pow(self, expr): if expr.exp", "or {}).get( 'user_functions', {})) self.known_constants = dict(self._kc, **(settings or {}).get(", "exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args)) conds = '[{0}]'.format(','.join(self._print(arg.cond)", "\"{name}({data}, ({i}, {j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i, j=j, shape=expr.shape )", "for i in expr.args)) def _print_DotProduct(self, expr): # DotProduct allows", "2)) return ''.join(result) def _print_ITE(self, expr): from sympy.functions.elementary.piecewise import Piecewise", "in _known_functions_math.items()] )) _kc = {k: 'math.'+v for k, v", "= result[:-1] result.append(') else None)') result.append(')'*(2*i - 2)) return ''.join(result)", "sure it gets 1 x n by n x 1.", "in _known_functions_math.items() if k not in _not_in_numpy] _known_functions_numpy = dict(_in_numpy,", "result = [] i = 0 for arg in expr.args:", "= _kw_py2and3.union(_kw_only_py3) modules = None # initialized to a set", "{ 'erf': 'erf', 'erfc': 'erfc', 'gamma': 'gamma', 'loggamma': 'gammaln' }", "'None' is actually not in Python 2's keyword.kwlist } _kw_only_py2", "expr): return \"%s(%s)\" % (self._module_format('numpy.angle'), self._print(expr.args[0])) def _print_im(self, expr): return", "\"Logical Not printer\" # We have to override LambdaPrinter because", "n by n x 1. arg1, arg2 = expr.args if", "# own because StrPrinter doesn't define it. 
return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i)", "setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const) def pycode(expr, **settings): return PythonCodePrinter(settings).doprint(expr)", "'sqrt', 'tan': 'tan', 'tanh': 'tanh' } # Not used from", "expr.args)) def _print_Max(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in", "% (self._module_format('numpy.real'), self._print(expr.args[0])) def _print_MatrixBase(self, expr): func = self.known_functions.get(expr.__class__.__name__, None)", "arg1, arg2 = expr.args if arg1.shape[0] != 1: arg1 =", "dict( CodePrinter._default_settings, user_functions={}, precision=17, inline=True, fully_qualified_modules=True ) def __init__(self, settings=None):", "sequence printer: converts to tuple\" # Print tuples here instead", "'acos': 'arccos', 'acosh': 'arccosh', 'asin': 'arcsin', 'asinh': 'arcsinh', 'atan': 'arctan',", "collections import defaultdict from functools import wraps from itertools import", "_print_log2(self, e): return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) def _print_log1p(self, e): return", "b in expr.limits) return '(builtins.sum({function} {loops}))'.format( function=self._print(expr.function), loops=' '.join(loops)) def", "keyword. 
# If LambdaPrinter didn't define it, we would still", "in expr.args)) # If [default_value, True] is a (expr, cond)", "\"\"\" Numpy printer which handles vectorized piecewise functions, logical operators,", "None) if func is None: func = self._module_format('numpy.array') return \"%s(%s)\"", "in seq)) def _print_MatMul(self, expr): \"Matrix multiplication printer\" return '({0})'.format(').dot('.join(self._print(i)", "# {0}\".format(text) def _print_NaN(self, expr): return \"float('nan')\" def _print_Infinity(self, expr):", "for mpmath which maintains precision for floats \"\"\" printmethod =", "i in expr.args)) def _print_Max(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for", "'(builtins.sum({function} {loops}))'.format( function=self._print(expr.function), loops=' '.join(loops)) def _print_ImaginaryUnit(self, expr): return '1j'", "isnan ldexp frexp pow modf # radians trunc fmod fsum", "_print_MutableSparseMatrix = \\ _print_ImmutableSparseMatrix = \\ _print_Matrix = \\ _print_DenseMatrix", "'log', 'log10': 'log10', 'log1p': 'log1p', 'log2': 'log2', 'sin': 'sin', 'sinh':", "def _print_Relational(self, expr): \"Relational printer for Equality and Unequality\" op", "fqn, register=True): parts = fqn.split('.') if register and len(parts) >", "'factorial', 'floor': 'floor', 'gamma': 'gamma', 'hypot': 'hypot', 'loggamma': 'lgamma', 'log':", "wraps from itertools import chain from sympy.core import sympify from", "= self._module_format('numpy.array') return \"%s(%s)\" % (func, self._print(expr.tolist())) for k in", "in expr.args. 
# If this is not the case, it", "'del', 'elif', 'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import',", "1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']: return fqn else: return fqn.split('(')[0].split('[')[0].split('.')[-1] def", "'pass', 'raise', 'return', 'try', 'while', 'with', 'yield', 'None' # 'None'", "j, data = [], [], [] for (r, c), v", "expr): result = [] i = 0 for arg in", "\"_pythoncode\" language = \"Python\" standard = \"python3\" reserved_words = _kw_py2and3.union(_kw_only_py3)", "for k, v in _known_functions_math.items() if k not in _not_in_mpmath]", "# version of the function and add 'logical_and' to NUMPY_TRANSLATIONS.", "LambdaPrinter didn't define it, we could use StrPrinter's # version", "'mpz' if gmpy is installed. args = str(tuple(map(int, e._mpf_))) return", "known = self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print, expr.args))) def _print_known_const(self,", "from itertools import chain from sympy.core import sympify from .precedence", "self).__init__(settings) self.module_imports = defaultdict(set) self.known_functions = dict(self._kf, **(settings or {}).get(", "the last element in expr.args. 
# If this is not", "_print_Sum(self, expr): loops = ( 'for {i} in range({a}, {b}+1)'.format(", "\\ _print_ImmutableDenseMatrix = \\ lambda self, expr: self._print_MatrixBase(expr) for k", "% k, _print_known_func) for k in NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s' %", "for k, v in _known_functions_scipy_special.items()] )) _kc = {k: 'scipy.constants.'", "select() # *as long as* it is the last element", "','.join(self._print(i) for i in expr.args)) def _print_Or(self, expr): \"Logical Or", "v in _known_constants_math.items()} def _print_seq(self, seq, delimiter=', '): \"General sequence", "= dict(_in_mpmath) _known_constants_mpmath = { 'Pi': 'pi' } class MpmathPrinter(PythonCodePrinter):", "expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in expr.args)) def _print_Pow(self,", "self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1])) def _print_log2(self, e): return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0]))", "_known_functions.items(), [(k, 'math.' 
+ v) for k, v in _known_functions_math.items()]", "tab = ' ' _kf = dict(chain( _known_functions.items(), [(k, 'math.'", "if arg2.shape[1] != 1: arg2 = arg2.T return \"%s(%s, %s)\"", "for k in NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s' % k, _print_known_func) for", "_print_Relational(self, expr): \"Relational printer for Equality and Unequality\" op =", ")) _kc = {k: 'math.'+v for k, v in _known_constants_math.items()}", "result.append(')'*(2*i - 2)) return ''.join(result) def _print_ITE(self, expr): from sympy.functions.elementary.piecewise", "it to sufficient # precision to match the Floats in", "return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def _print_uppergamma(self,e): #printer for the uppergamma function", "matrix # multiplication, so we have to make sure it", "1: arg2 = arg2.T return \"%s(%s, %s)\" % (self._module_format('numpy.dot'), self._print(arg1),", "args = str(tuple(map(int, e._mpf_))) return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def _print_uppergamma(self,e): #printer", "in _known_constants_scipy_constants.items()} def _print_SparseMatrix(self, expr): i, j, data = [],", "NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s' % k, _print_known_const) _known_functions_scipy_special = { 'erf':", "{k: 'math.'+v for k, v in _known_constants_math.items()} _operators = {'and':", "have to override LambdaPrinter because it uses Python 'or' keyword.", "% k, _print_known_func) for k in SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s' %", "[copysign isclose isfinite isinf isnan ldexp frexp pow modf #", "= { 'GoldenRatio': 'golden_ratio' } class SciPyPrinter(NumPyPrinter): _kf = dict(chain(", "If LambdaPrinter didn't define it, we would still have to", "'{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args) def _print_uppergamma(self,e): #printer for the uppergamma function return", "SymPyPrinter(PythonCodePrinter): _kf = dict([(k, 'sympy.' 
+ v) for k, v", "_print_lowergamma(self,e): #printer for the lowergamma functioin return \"{0}({1}, 0, {2})\".format(", "fqn.split('.') if register and len(parts) > 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']:", "', '.join(map(self._print, expr.args))) def _print_re(self, expr): return \"%s(%s)\" % (self._module_format('numpy.real'),", "name, value): return \"%s = %s\" % (name, value) def", "('{0} % {1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args))) def _print_Piecewise(self, expr):", "would still have to define our # own because StrPrinter", "= self._print(expr.lhs) rhs = self._print(expr.rhs) return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs)", "'atan2': 'atan2', 'atanh': 'atanh', 'ceiling': 'ceil', 'cos': 'cos', 'cosh': 'cosh',", "def _print_Or(self, expr): \"Logical Or printer\" # We have to", "e): return '{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) for k in MpmathPrinter._kf: setattr(MpmathPrinter,", "'Exp1': 'e', 'Pi': 'pi', # Only in python >= 3.5:", "_print_MatrixBase(self, expr): func = self.known_functions.get(expr.__class__.__name__, None) if func is None:", "arg2 = arg2.T return \"%s(%s, %s)\" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2))", "i in expr.args)) def _print_Or(self, expr): \"Logical Or printer\" #", "case, it may be triggered prematurely. 
return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'),", "_print_known_func(self, expr): known = self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print, expr.args)))", "\"%s(%s)\" % (self._module_format('numpy.real'), self._print(expr.args[0])) def _print_MatrixBase(self, expr): func = self.known_functions.get(expr.__class__.__name__,", "'atan', 'atan2': 'atan2', 'atanh': 'atanh', 'ceiling': 'ceil', 'cos': 'cos', 'cosh':", "expression. # Remove 'mpz' if gmpy is installed. args =", "v in _known_functions_math.items()] )) _kc = {k: 'math.'+v for k,", "in expr._smat.items(): i.append(r) j.append(c) data.append(v) return \"{name}({data}, ({i}, {j}), shape={shape})\".format(", "gamma lgamma'.split() _in_numpy = [(k, v) for k, v in", "expr.exp == 0.5: return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else: return super(NumPyPrinter, self)._print_Pow(expr)", "for k, v in _known_functions_numpy.items()] )) _kc = {k: 'numpy.'+v", "uses Python 'or' keyword. 
# If LambdaPrinter didn't define it,", "'{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in expr.args)) def _print_Not(self, expr): \"Logical", "expr): return \"%s(%s)\" % (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args))) def _print_re(self,", "data = [], [], [] for (r, c), v in", "_known_functions_math.items()] )) _kc = {k: 'math.'+v for k, v in", "self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf')) def _print_lowergamma(self,e): #printer for the lowergamma functioin", "but numpy.dot does matrix # multiplication, so we have to", "expr): return \"%s(%s)\" % (self._module_format('numpy.imag', self._print(expr.args[0]))) def _print_Mod(self, expr): return", "sympify from .precedence import precedence from .codeprinter import CodePrinter _kw_py2and3", "in MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s' % k, _print_known_func) for k in", "i in expr.args)) def _print_Pow(self, expr): if expr.exp == 0.5:", "PythonCodePrinter(CodePrinter): printmethod = \"_pythoncode\" language = \"Python\" standard = \"python3\"", "expr.args)) def _print_Or(self, expr): \"Logical Or printer\" # We have", "NumPyPrinter(PythonCodePrinter): \"\"\" Numpy printer which handles vectorized piecewise functions, logical", "(self._module_format('numpy.dot'), self._print(arg1), self._print(arg2)) def _print_Piecewise(self, expr): \"Piecewise function printer\" exprs", "which maintains precision for floats \"\"\" printmethod = \"_mpmathcode\" _kf", "\"Python\" standard = \"python3\" reserved_words = _kw_py2and3.union(_kw_only_py3) modules = None", "pow modf # radians trunc fmod fsum gcd degrees fabs]", "return \"%s(%s)\" % (func, self._print(expr.tolist())) _print_SparseMatrix = \\ _print_MutableSparseMatrix =", "match the Floats in the expression. 
# Remove 'mpz' if", "else (') i += 1 result = result[:-1] result.append(') else", "uppergamma function return \"{0}({1}, {2}, {3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]), self._module_format('mpmath.inf'))", "_known_functions_numpy.items()] )) _kc = {k: 'numpy.'+v for k, v in", "maintains precision for floats \"\"\" printmethod = \"_mpmathcode\" _kf =", "'inf', # 'NaN': 'nan' } def _print_known_func(self, expr): known =", "NumPyPrinter._kf.items(), [(k, 'scipy.special.' + v) for k, v in _known_functions_scipy_special.items()]", "'raise', 'return', 'try', 'while', 'with', 'yield', 'None' # 'None' is", "in nopython mode. return '({},)'.format(delimiter.join(self._print(item) for item in seq)) def", "range({a}, {b}+1)'.format( i=self._print(i), a=self._print(a), b=self._print(b)) for i, a, b in", "will behave the same as passing the 'default' kwarg to", "def _print_Piecewise(self, expr): \"Piecewise function printer\" exprs = '[{0}]'.format(','.join(self._print(arg.expr) for", "dict(self._kc, **(settings or {}).get( 'user_constants', {})) def _declare_number_const(self, name, value):", "PythonCodePrinter._kf.items(), [(k, 'numpy.' 
+ v) for k, v in _known_functions_numpy.items()]", "expr.args)) def _print_Pow(self, expr): if expr.exp == 0.5: return '{0}({1})'.format(self._module_format('numpy.sqrt'),", "``math``: [copysign isclose isfinite isinf isnan ldexp frexp pow modf", "= precedence(expr) return ('{0} % {1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args)))", "'sin': 'sin', 'sinh': 'sinh', 'Sqrt': 'sqrt', 'tan': 'tan', 'tanh': 'tanh'", "{0}\".format(text) def _print_NaN(self, expr): return \"float('nan')\" def _print_Infinity(self, expr): return", "And printer\" # We have to override LambdaPrinter because it", "+ v) for k, v in _known_functions_math.items()] )) _kc =", "= \\ _print_ImmutableSparseMatrix = \\ _print_Matrix = \\ _print_DenseMatrix =", "expr.args if arg1.shape[0] != 1: arg1 = arg1.T if arg2.shape[1]", "'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally', 'for', 'from',", "expr.func.__module__ or '' return '%s(%s)' % (self._module_format(mod + ('.' if", "from ``math``: [copysign isclose isfinite isinf isnan ldexp frexp pow", "e): return '%s(%d)' % (self._module_format('mpmath.mpf'), e) def _print_Float(self, e): #", "class PythonCodePrinter(CodePrinter): printmethod = \"_pythoncode\" language = \"Python\" standard =", "shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i, j=j, shape=expr.shape ) _print_ImmutableSparseMatrix = _print_SparseMatrix", "printer\" exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args)) conds =", "_known_constants_math: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const) def pycode(expr, **settings): return", "} if expr.rel_op in op: lhs = self._print(expr.lhs) rhs =", "in _known_constants_mpmath: setattr(MpmathPrinter, '_print_%s' % k, _print_known_const) _not_in_numpy = 'erf", "printer\" return '({0})'.format(').dot('.join(self._print(i) for i in expr.args)) def _print_DotProduct(self, expr):", "== 0.5: return 
'{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else: return super(NumPyPrinter, self)._print_Pow(expr) def", "\\ _print_Matrix = \\ _print_DenseMatrix = \\ _print_MutableDenseMatrix = \\", "'asinh', 'atan': 'atan', 'atan2': 'atan2', 'atanh': 'atanh', 'ceiling': 'ceil', 'cos':", "expr): return '1j' def _print_MatrixBase(self, expr): name = expr.__class__.__name__ func", "a set in __init__ tab = ' ' _kf =", "def _print_MatMul(self, expr): \"Matrix multiplication printer\" return '({0})'.format(').dot('.join(self._print(i) for i", "frexp pow modf # radians trunc fmod fsum gcd degrees", "make sure it gets 1 x n by n x", "self._print(expr.args[0]))) def _print_Mod(self, expr): return \"%s(%s)\" % (self._module_format('numpy.mod'), ', '.join(map(self._print,", "'gammaln' } _known_constants_scipy_constants = { 'GoldenRatio': 'golden_ratio' } class SciPyPrinter(NumPyPrinter):", "mpmath.mp.dps. It is assumed that # the caller of the", "for i in expr.args)) def _print_Pow(self, expr): if expr.exp ==", "'.join(loops)) def _print_ImaginaryUnit(self, expr): return '1j' def _print_MatrixBase(self, expr): name", "in _known_functions_numpy.items()] )) _kc = {k: 'numpy.'+v for k, v", "k in NumPyPrinter._kf: setattr(NumPyPrinter, '_print_%s' % k, _print_known_func) for k", "ldexp frexp pow modf # radians trunc fmod fsum gcd", "_print_ITE(self, expr): from sympy.functions.elementary.piecewise import Piecewise return self._print(expr.rewrite(Piecewise)) def _print_Sum(self,", "'nan' } def _print_known_func(self, expr): known = self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known),", "data.append(v) return \"{name}({data}, ({i}, {j}), shape={shape})\".format( name=self._module_format('scipy.sparse.coo_matrix'), data=data, i=i, j=j,", "because StrPrinter doesn't define it. 
return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i", "expr): mod = expr.func.__module__ or '' return '%s(%s)' % (self._module_format(mod", "'arcsin', 'asinh': 'arcsinh', 'atan': 'arctan', 'atan2': 'arctan2', 'atanh': 'arctanh', 'exp2':", "'hypot', 'loggamma': 'lgamma', 'log': 'log', 'log10': 'log10', 'log1p': 'log1p', 'log2':", "_print_And(self, expr): \"Logical And printer\" # We have to override", "'or': 'or', 'not': 'not'} _default_settings = dict( CodePrinter._default_settings, user_functions={}, precision=17,", "super(NumPyPrinter, self)._print_Pow(expr) def _print_arg(self, expr): return \"%s(%s)\" % (self._module_format('numpy.angle'), self._print(expr.args[0]))", "return '1j' def _print_MatrixBase(self, expr): name = expr.__class__.__name__ func =", "'asinh': 'arcsinh', 'atan': 'arctan', 'atan2': 'arctan2', 'atanh': 'arctanh', 'exp2': 'exp2',", "version of the function and add 'logical_or' to NUMPY_TRANSLATIONS. return", "_known_constants_mpmath = { 'Pi': 'pi' } class MpmathPrinter(PythonCodePrinter): \"\"\" Lambda", "if func is None: func = self._module_format('numpy.array') return \"%s(%s)\" %", "k, _print_known_func) for k in NumPyPrinter._kc: setattr(NumPyPrinter, '_print_%s' % k,", "expr): known = self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print, expr.args))) def", "register and len(parts) > 1: self.module_imports['.'.join(parts[:-1])].add(parts[-1]) if self._settings['fully_qualified_modules']: return fqn", "_print_ImaginaryUnit(self, expr): return '1j' def _print_MatrixBase(self, expr): name = expr.__class__.__name__", "'arccosh', 'asin': 'arcsin', 'asinh': 'arcsinh', 'atan': 'arctan', 'atan2': 'arctan2', 'atanh':", "return \"%s(%s)\" % (self._module_format('numpy.imag', self._print(expr.args[0]))) def _print_Mod(self, expr): return \"%s(%s)\"", ":'greater_equal', } if expr.rel_op in op: lhs = self._print(expr.lhs) 
rhs", "'acosh': 'acosh', 'asin': 'asin', 'asinh': 'asinh', 'atan': 'atan', 'atan2': 'atan2',", "{ '==' :'equal', '!=' :'not_equal', '<' :'less', '<=' :'less_equal', '>'", "from sympy.core import sympify from .precedence import precedence from .codeprinter", "result.append(self._print(c)) result.append(') else (') i += 1 result = result[:-1]", "PREC), expr.args))) def _print_Piecewise(self, expr): result = [] i =", "printmethod = \"_mpmathcode\" _kf = dict(chain( _known_functions.items(), [(k, 'mpmath.' +", "**(settings or {}).get( 'user_constants', {})) def _declare_number_const(self, name, value): return", "\\ _print_MutableDenseMatrix = \\ _print_ImmutableMatrix = \\ _print_ImmutableDenseMatrix = \\", "self._print_MatrixBase(expr) for k in PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func)", "for k in MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s' % k, _print_known_func) for", "tuples here instead of lists because numba supports # tuples", "if arg1.shape[0] != 1: arg1 = arg1.T if arg2.shape[1] !=", "'assert', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally',", "degrees fabs] _known_constants_math = { 'Exp1': 'e', 'Pi': 'pi', #", "in the expression. # Remove 'mpz' if gmpy is installed.", "def _module_format(self, fqn, register=True): parts = fqn.split('.') if register and", "dict(chain( _known_functions.items(), [(k, 'mpmath.' 
+ v) for k, v in", "_known_constants_math.items()} _operators = {'and': 'and', 'or': 'or', 'not': 'not'} _default_settings", "{'exec', 'print'} _kw_only_py3 = {'False', 'nonlocal', 'True'} _known_functions = {", "return fqn else: return fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self, lines): return lines", "used from ``math``: [copysign isclose isfinite isinf isnan ldexp frexp", "0, {2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1])) def _print_log2(self, e): return '{0}({1})/{0}(2)'.format(", "(self._module_format('numpy.angle'), self._print(expr.args[0])) def _print_im(self, expr): return \"%s(%s)\" % (self._module_format('numpy.imag', self._print(expr.args[0])))", "dict(chain( NumPyPrinter._kf.items(), [(k, 'scipy.special.' + v) for k, v in", "'golden_ratio' } class SciPyPrinter(NumPyPrinter): _kf = dict(chain( NumPyPrinter._kf.items(), [(k, 'scipy.special.'", "{ 'GoldenRatio': 'golden_ratio' } class SciPyPrinter(NumPyPrinter): _kf = dict(chain( NumPyPrinter._kf.items(),", "'numpy.'+v for k, v in _known_constants_math.items()} def _print_seq(self, seq, delimiter=',", "for the uppergamma function return \"{0}({1}, {2}, {3})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]),", "for floats \"\"\" printmethod = \"_mpmathcode\" _kf = dict(chain( _known_functions.items(),", "our # own because StrPrinter doesn't define it. 
return '{0}({1})'.format(self._module_format('numpy.logical_not'),", "pycode(expr, **settings): return PythonCodePrinter(settings).doprint(expr) _not_in_mpmath = 'log1p log2'.split() _in_mpmath =", "v) for k, v in chain( _known_functions.items(), _known_functions_math.items() )]) def", "'True'} _known_functions = { 'Abs': 'abs', } _known_functions_math = {", "in expr.args)) def _print_DotProduct(self, expr): # DotProduct allows any shape", "_print_SparseMatrix for k in SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s' % k, _print_known_func)", "# precision to match the Floats in the expression. #", "the function and add 'logical_and' to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i)", "# radians trunc fmod fsum gcd degrees fabs] _known_constants_math =", "return \"%s(%s)\" % (self._module_format('numpy.angle'), self._print(expr.args[0])) def _print_im(self, expr): return \"%s(%s)\"", "'user_constants', {})) def _declare_number_const(self, name, value): return \"%s = %s\"", "# the caller of the lambdified function will have set", "for k, v in _known_constants_math.items()} def _print_seq(self, seq, delimiter=', '):", "v in _known_functions_math.items() if k not in _not_in_numpy] _known_functions_numpy =", "return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in expr.args)) def _print_Min(self, expr):", "import defaultdict from functools import wraps from itertools import chain", "lambdified function will have set it to sufficient # precision", "so we have to make sure it gets 1 x", "passing the 'default' kwarg to select() # *as long as*", "{}).get( 'user_constants', {})) def _declare_number_const(self, name, value): return \"%s =", "self._print(e.args[1])) def _print_log2(self, e): return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) def _print_log1p(self,", "LambdaPrinter because it uses Python 'not' keyword. 
# If LambdaPrinter", "def _declare_number_const(self, name, value): return \"%s = %s\" % (name,", "'pi', # Only in python >= 3.5: # 'Infinity': 'inf',", "'arctan2', 'atanh': 'arctanh', 'exp2': 'exp2', }) class NumPyPrinter(PythonCodePrinter): \"\"\" Numpy", "LambdaPrinter because it uses Python 'or' keyword. # If LambdaPrinter", "= dict(self._kc, **(settings or {}).get( 'user_constants', {})) def _declare_number_const(self, name,", "'{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print, expr.args))) def _print_known_const(self, expr): known = self.known_constants[expr.__class__.__name__]", "_declare_number_const(self, name, value): return \"%s = %s\" % (name, value)", "k, v in _known_functions_math.items() if k not in _not_in_numpy] _known_functions_numpy", "% (self._module_format('numpy.angle'), self._print(expr.args[0])) def _print_im(self, expr): return \"%s(%s)\" % (self._module_format('numpy.imag',", "k, _print_known_const) _not_in_numpy = 'erf erfc factorial gamma lgamma'.split() _in_numpy", "args=args) def _print_uppergamma(self,e): #printer for the uppergamma function return \"{0}({1},", "(name, value) def _module_format(self, fqn, register=True): parts = fqn.split('.') if", "result.append(') if (') result.append(self._print(c)) result.append(') else (') i += 1", "setattr(MpmathPrinter, '_print_%s' % k, _print_known_func) for k in _known_constants_mpmath: setattr(MpmathPrinter,", "_print_known_func) for k in _known_constants_mpmath: setattr(MpmathPrinter, '_print_%s' % k, _print_known_const)", "'{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in expr.args)) def _print_Or(self, expr): \"Logical", "- 2)) return ''.join(result) def _print_ITE(self, expr): from sympy.functions.elementary.piecewise import", "'exp2', }) class NumPyPrinter(PythonCodePrinter): \"\"\" Numpy printer which handles vectorized", "'arctan', 'atan2': 'arctan2', 'atanh': 'arctanh', 'exp2': 'exp2', }) class 
NumPyPrinter(PythonCodePrinter):", "rhs = self._print(expr.rhs) return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs) return super(NumPyPrinter,", "rhs=rhs) return super(NumPyPrinter, self)._print_Relational(expr) def _print_And(self, expr): \"Logical And printer\"", "the expression. # Remove 'mpz' if gmpy is installed. args", "and Unequality\" op = { '==' :'equal', '!=' :'not_equal', '<'", "for k in SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s' % k, _print_known_const) class", "expr): \"Matrix multiplication printer\" return '({0})'.format(').dot('.join(self._print(i) for i in expr.args))", "'gamma', 'hypot': 'hypot', 'loggamma': 'lgamma', 'log': 'log', 'log10': 'log10', 'log1p':", "'and', 'or': 'or', 'not': 'not'} _default_settings = dict( CodePrinter._default_settings, user_functions={},", "result.append(') else None)') result.append(')'*(2*i - 2)) return ''.join(result) def _print_ITE(self,", "setattr(MpmathPrinter, '_print_%s' % k, _print_known_const) _not_in_numpy = 'erf erfc factorial", "'default' kwarg to select() # *as long as* it is", "= {'exec', 'print'} _kw_only_py3 = {'False', 'nonlocal', 'True'} _known_functions =", "if k not in _not_in_mpmath] _known_functions_mpmath = dict(_in_mpmath) _known_constants_mpmath =", "'erfc', 'gamma': 'gamma', 'loggamma': 'gammaln' } _known_constants_scipy_constants = { 'GoldenRatio':", "'log1p log2'.split() _in_mpmath = [(k, v) for k, v in", "mod = expr.func.__module__ or '' return '%s(%s)' % (self._module_format(mod +", "'NaN': 'nan' } def _print_known_func(self, expr): known = self.known_functions[expr.__class__.__name__] return", "% k, _print_known_const) _known_functions_scipy_special = { 'erf': 'erf', 'erfc': 'erfc',", "\"float('inf')\" def _print_Mod(self, expr): PREC = precedence(expr) return ('{0} %", "_print_Pow(self, expr): if expr.exp == 0.5: return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else:", "own because 
StrPrinter doesn't define it. return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for", "isinf isnan ldexp frexp pow modf # radians trunc fmod", "k, _print_known_const) _known_functions_scipy_special = { 'erf': 'erf', 'erfc': 'erfc', 'gamma':", "_known_functions_math.items() if k not in _not_in_numpy] _known_functions_numpy = dict(_in_numpy, **{", "shape=expr.shape ) _print_ImmutableSparseMatrix = _print_SparseMatrix for k in SciPyPrinter._kf: setattr(SciPyPrinter,", "= dict(chain( PythonCodePrinter._kf.items(), [(k, 'numpy.' + v) for k, v", "_print_known_const) def pycode(expr, **settings): return PythonCodePrinter(settings).doprint(expr) _not_in_mpmath = 'log1p log2'.split()", "for k, v in _known_constants_math.items()} _operators = {'and': 'and', 'or':", "a, b in expr.limits) return '(builtins.sum({function} {loops}))'.format( function=self._print(expr.function), loops=' '.join(loops))", "for arg in expr.args)) conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in", "b=self._print(b)) for i, a, b in expr.limits) return '(builtins.sum({function} {loops}))'.format(", "\\ lambda self, expr: self._print_MatrixBase(expr) for k in PythonCodePrinter._kf: setattr(PythonCodePrinter,", "} class MpmathPrinter(PythonCodePrinter): \"\"\" Lambda printer for mpmath which maintains", "last element in expr.args. # If this is not the", "{b}+1)'.format( i=self._print(i), a=self._print(a), b=self._print(b)) for i, a, b in expr.limits)", "= \\ lambda self, expr: self._print_MatrixBase(expr) for k in PythonCodePrinter._kf:", "'or' keyword. 
# If LambdaPrinter didn't define it, we could", "the 'default' kwarg to select() # *as long as* it", "does matrix # multiplication, so we have to make sure", "k, v in _known_functions_math.items()] )) _kc = {k: 'math.'+v for", "return \"%s = %s\" % (name, value) def _module_format(self, fqn,", "'>=' :'greater_equal', } if expr.rel_op in op: lhs = self._print(expr.lhs)", "'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is',", "i, a, b in expr.limits) return '(builtins.sum({function} {loops}))'.format( function=self._print(expr.function), loops='", "return \"float('nan')\" def _print_Infinity(self, expr): return \"float('inf')\" def _print_Mod(self, expr):", "i, j, data = [], [], [] for (r, c),", "return '{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) for k in MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s'", "return lines def _get_comment(self, text): return \" # {0}\".format(text) def", "__init__(self, settings=None): super(PythonCodePrinter, self).__init__(settings) self.module_imports = defaultdict(set) self.known_functions = dict(self._kf,", "= dict(_in_numpy, **{ 'acos': 'arccos', 'acosh': 'arccosh', 'asin': 'arcsin', 'asinh':", "setattr(SciPyPrinter, '_print_%s' % k, _print_known_const) class SymPyPrinter(PythonCodePrinter): _kf = dict([(k,", "x: self.parenthesize(x, PREC), expr.args))) def _print_Piecewise(self, expr): result = []", "+ v for k, v in _known_constants_scipy_constants.items()} def _print_SparseMatrix(self, expr):", "trunc fmod fsum gcd degrees fabs] _known_constants_math = { 'Exp1':", "= [], [], [] for (r, c), v in expr._smat.items():", "return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base)) else: return super(NumPyPrinter, self)._print_Pow(expr) def _print_arg(self, expr):", "etc. 
\"\"\" printmethod = \"_numpycode\" _kf = dict(chain( PythonCodePrinter._kf.items(), [(k,", "language = \"Python\" standard = \"python3\" reserved_words = _kw_py2and3.union(_kw_only_py3) modules", "super(NumPyPrinter, self)._print_Relational(expr) def _print_And(self, expr): \"Logical And printer\" # We", "# 'NaN': 'nan' } def _print_known_func(self, expr): known = self.known_functions[expr.__class__.__name__]", "result.append(self._print(e)) result.append(') if (') result.append(self._print(c)) result.append(') else (') i +=", "expr): \"Logical Not printer\" # We have to override LambdaPrinter", "arg in expr.args)) # If [default_value, True] is a (expr,", "in expr.args: e = arg.expr c = arg.cond result.append('((') result.append(self._print(e))", "def _print_Not(self, expr): \"Logical Not printer\" # We have to", "multiplication printer\" return '({0})'.format(').dot('.join(self._print(i) for i in expr.args)) def _print_DotProduct(self,", "MpmathPrinter._kf: setattr(MpmathPrinter, '_print_%s' % k, _print_known_func) for k in _known_constants_mpmath:", "for k, v in _known_functions_math.items() if k not in _not_in_numpy]", "seq, delimiter=', '): \"General sequence printer: converts to tuple\" #", "\"%s(%s)\" % (func, self._print(expr.tolist())) _print_SparseMatrix = \\ _print_MutableSparseMatrix = \\", "= arg.expr c = arg.cond result.append('((') result.append(self._print(e)) result.append(') if (')", "expr.args)) def _print_Not(self, expr): \"Logical Not printer\" # We have", "to sufficient # precision to match the Floats in the", "self.known_constants[expr.__class__.__name__] return self._module_format(known) class PythonCodePrinter(CodePrinter): printmethod = \"_pythoncode\" language =", "'numpy.' 
+ v) for k, v in _known_functions_numpy.items()] )) _kc", "'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally', 'for',", "'_print_%s' % k, _print_known_func) for k in SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s'", "default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs) def _print_Relational(self, expr): \"Relational printer for Equality", "by n x 1. arg1, arg2 = expr.args if arg1.shape[0]", "or {}).get( 'user_constants', {})) def _declare_number_const(self, name, value): return \"%s", "k, _print_known_func) for k in SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s' % k,", "1 x n by n x 1. arg1, arg2 =", "not handle setting mpmath.mp.dps. It is assumed that # the", "'%s(%s)' % (self._module_format(mod + ('.' if mod else '') +", "'gamma', 'loggamma': 'gammaln' } _known_constants_scipy_constants = { 'GoldenRatio': 'golden_ratio' }", "+= 1 result = result[:-1] result.append(') else None)') result.append(')'*(2*i -", "'logical_or' to NUMPY_TRANSLATIONS. return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in expr.args))", "kwarg to select() # *as long as* it is the", "= dict([(k, 'sympy.' 
+ v) for k, v in chain(", "return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in expr.args)) def _print_Pow(self, expr):", "*as long as* it is the last element in expr.args.", "{i} in range({a}, {b}+1)'.format( i=self._print(i), a=self._print(a), b=self._print(b)) for i, a,", "'loggamma': 'lgamma', 'log': 'log', 'log10': 'log10', 'log1p': 'log1p', 'log2': 'log2',", "\"General sequence printer: converts to tuple\" # Print tuples here", "self._print(expr.args[0])) def _print_MatrixBase(self, expr): func = self.known_functions.get(expr.__class__.__name__, None) if func", "return super(NumPyPrinter, self)._print_Relational(expr) def _print_And(self, expr): \"Logical And printer\" #", "MpmathPrinter(PythonCodePrinter): \"\"\" Lambda printer for mpmath which maintains precision for", "precedence from .codeprinter import CodePrinter _kw_py2and3 = { 'and', 'as',", "= '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args)) # If [default_value, True]", "initialized to a set in __init__ tab = ' '", "to tuple\" # Print tuples here instead of lists because", "% (self._module_format('mpmath.mpf'), e) def _print_Float(self, e): # XXX: This does", "return PythonCodePrinter(settings).doprint(expr) _not_in_mpmath = 'log1p log2'.split() _in_mpmath = [(k, v)", "[], [] for (r, c), v in expr._smat.items(): i.append(r) j.append(c)", "to select() # *as long as* it is the last", "'continue', 'def', 'del', 'elif', 'else', 'except', 'finally', 'for', 'from', 'global',", "_print_Not(self, expr): \"Logical Not printer\" # We have to override", "# 'None' is actually not in Python 2's keyword.kwlist }", "func = self.known_functions.get(expr.__class__.__name__, None) if func is None: func =", "'log1p', 'log2': 'log2', 'sin': 'sin', 'sinh': 'sinh', 'Sqrt': 'sqrt', 'tan':", "expr): i, j, data = [], [], [] for (r,", "'%s(%d)' % (self._module_format('mpmath.mpf'), e) def _print_Float(self, e): # XXX: This", "in _known_functions_mpmath.items()] )) 
def _print_Integer(self, e): return '%s(%d)' % (self._module_format('mpmath.mpf'),", "'Pi': 'pi', # Only in python >= 3.5: # 'Infinity':", "e): return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) def _print_log1p(self, e): return '{0}({1}+1)'.format(", "{k: 'numpy.'+v for k, v in _known_constants_math.items()} def _print_seq(self, seq,", "func = self._module_format('numpy.array') return \"%s(%s)\" % (func, self._print(expr.tolist())) for k", "return '{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print, expr.args))) def _print_known_const(self, expr): known =", "def _print_Pow(self, expr): if expr.exp == 0.5: return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base))", "return self._print(expr.rewrite(Piecewise)) def _print_Sum(self, expr): loops = ( 'for {i}", "def _print_log1p(self, e): return '{0}({1}+1)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) for k in", "% k, _print_known_func) for k in _known_constants_math: setattr(PythonCodePrinter, '_print_%s' %", "(self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args))) def _print_re(self, expr): return \"%s(%s)\" %", "# 'Infinity': 'inf', # 'NaN': 'nan' } def _print_known_func(self, expr):", "_kf = dict(chain( _known_functions.items(), [(k, 'math.' 
+ v) for k,", "{})) def _declare_number_const(self, name, value): return \"%s = %s\" %", "'erf': 'erf', 'erfc': 'erfc', 'gamma': 'gamma', 'loggamma': 'gammaln' } _known_constants_scipy_constants", "expr): \"Relational printer for Equality and Unequality\" op = {", "= \\ _print_ImmutableDenseMatrix = \\ lambda self, expr: self._print_MatrixBase(expr) for", "= dict(self._kf, **(settings or {}).get( 'user_functions', {})) self.known_constants = dict(self._kc,", "self._print(expr.rhs) return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs) return super(NumPyPrinter, self)._print_Relational(expr) def", "piecewise functions, logical operators, etc. \"\"\" printmethod = \"_numpycode\" _kf", "the Floats in the expression. # Remove 'mpz' if gmpy", "factorial gamma lgamma'.split() _in_numpy = [(k, v) for k, v", "Python 'and' keyword. # If LambdaPrinter didn't define it, we", "def _print_Float(self, e): # XXX: This does not handle setting", "import wraps from itertools import chain from sympy.core import sympify", "to override LambdaPrinter because it uses Python 'and' keyword. #", "1. arg1, arg2 = expr.args if arg1.shape[0] != 1: arg1", "'not', 'or', 'pass', 'raise', 'return', 'try', 'while', 'with', 'yield', 'None'", "None: func = self._module_format('numpy.array') return \"%s(%s)\" % (func, self._print(expr.tolist())) for", "'or', 'not': 'not'} _default_settings = dict( CodePrinter._default_settings, user_functions={}, precision=17, inline=True,", "% {1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args))) def _print_Piecewise(self, expr): result", "+ ('.' 
if mod else '') + expr.func.__name__), ', '.join(map(self._print,", "for k in SciPyPrinter._kf: setattr(SciPyPrinter, '_print_%s' % k, _print_known_func) for", "v for k, v in _known_constants_scipy_constants.items()} def _print_SparseMatrix(self, expr): i,", "itertools import chain from sympy.core import sympify from .precedence import", "% (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2)) def _print_Piecewise(self, expr): \"Piecewise function printer\"", "setattr(NumPyPrinter, '_print_%s' % k, _print_known_const) _known_functions_scipy_special = { 'erf': 'erf',", "'cosh': 'cosh', 'erf': 'erf', 'erfc': 'erfc', 'exp': 'exp', 'expm1': 'expm1',", "= {'False', 'nonlocal', 'True'} _known_functions = { 'Abs': 'abs', }", "self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known), args=', '.join(map(self._print, expr.args))) def _print_known_const(self, expr): known", "in SciPyPrinter._kc: setattr(SciPyPrinter, '_print_%s' % k, _print_known_const) class SymPyPrinter(PythonCodePrinter): _kf", "use StrPrinter's # version of the function and add 'logical_or'", "from .precedence import precedence from .codeprinter import CodePrinter _kw_py2and3 =", "[], [], [] for (r, c), v in expr._smat.items(): i.append(r)", "j=j, shape=expr.shape ) _print_ImmutableSparseMatrix = _print_SparseMatrix for k in SciPyPrinter._kf:", "def _print_Max(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in expr.args))", "in expr.args)) def _print_Max(self, expr): return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i", "def _print_log2(self, e): return '{0}({1})/{0}(2)'.format( self._module_format('mpmath.log'), self._print(e.args[0])) def _print_log1p(self, e):", "[default_value, True] is a (expr, cond) sequence in a Piecewise", "NUMPY_TRANSLATIONS. 
return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in expr.args)) def _print_Not(self,", "'in', 'is', 'lambda', 'not', 'or', 'pass', 'raise', 'return', 'try', 'while',", ".codeprinter import CodePrinter _kw_py2and3 = { 'and', 'as', 'assert', 'break',", "exprs) def _print_Relational(self, expr): \"Relational printer for Equality and Unequality\"", "expr): return \"float('nan')\" def _print_Infinity(self, expr): return \"float('inf')\" def _print_Mod(self,", "'lambda', 'not', 'or', 'pass', 'raise', 'return', 'try', 'while', 'with', 'yield',", "!= 1: arg1 = arg1.T if arg2.shape[1] != 1: arg2", "for (r, c), v in expr._smat.items(): i.append(r) j.append(c) data.append(v) return", "_print_Mod(self, expr): PREC = precedence(expr) return ('{0} % {1}'.format(*map(lambda x:", "def _print_DotProduct(self, expr): # DotProduct allows any shape order, but", "'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or', 'pass',", "modules = None # initialized to a set in __init__", "it gets 1 x n by n x 1. 
arg1,", "def _print_ITE(self, expr): from sympy.functions.elementary.piecewise import Piecewise return self._print(expr.rewrite(Piecewise)) def", "If this is not the case, it may be triggered", ">= 3.5: # 'Infinity': 'inf', # 'NaN': 'nan' } def", "fabs] _known_constants_math = { 'Exp1': 'e', 'Pi': 'pi', # Only", "def _print_known_const(self, expr): known = self.known_constants[expr.__class__.__name__] return self._module_format(known) class PythonCodePrinter(CodePrinter):", "It is assumed that # the caller of the lambdified", "lambda self, expr: self._print_MatrixBase(expr) for k in PythonCodePrinter._kf: setattr(PythonCodePrinter, '_print_%s'", ":'less_equal', '>' :'greater', '>=' :'greater_equal', } if expr.rel_op in op:", "_known_constants_math.items()} def _print_seq(self, seq, delimiter=', '): \"General sequence printer: converts", "k, v in _known_constants_math.items()} _operators = {'and': 'and', 'or': 'or',", "(r, c), v in expr._smat.items(): i.append(r) j.append(c) data.append(v) return \"{name}({data},", "'gamma': 'gamma', 'loggamma': 'gammaln' } _known_constants_scipy_constants = { 'GoldenRatio': 'golden_ratio'", "in Python 2's keyword.kwlist } _kw_only_py2 = {'exec', 'print'} _kw_only_py3", "return \"{0}({1}, 0, {2})\".format( self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1])) def _print_log2(self, e):", "we have to make sure it gets 1 x n", "} def _print_known_func(self, expr): known = self.known_functions[expr.__class__.__name__] return '{name}({args})'.format(name=self._module_format(known), args=',", "in a Piecewise object # it will behave the same", "= dict(chain( NumPyPrinter._kf.items(), [(k, 'scipy.special.' + v) for k, v", "= ' ' _kf = dict(chain( _known_functions.items(), [(k, 'math.' 
+", "conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args)) # If [default_value,", "def _print_uppergamma(self,e): #printer for the uppergamma function return \"{0}({1}, {2},", "settings=None): super(PythonCodePrinter, self).__init__(settings) self.module_imports = defaultdict(set) self.known_functions = dict(self._kf, **(settings", "= {k: 'numpy.'+v for k, v in _known_constants_math.items()} def _print_seq(self,", "arg2.shape[1] != 1: arg2 = arg2.T return \"%s(%s, %s)\" %", "Print tuples here instead of lists because numba supports #", "return super(NumPyPrinter, self)._print_Pow(expr) def _print_arg(self, expr): return \"%s(%s)\" % (self._module_format('numpy.angle'),", "operators, etc. \"\"\" printmethod = \"_numpycode\" _kf = dict(chain( PythonCodePrinter._kf.items(),", "in _known_constants_math.items()} def _print_seq(self, seq, delimiter=', '): \"General sequence printer:", "it may be triggered prematurely. return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds,", "This does not handle setting mpmath.mp.dps. It is assumed that", "fqn.split('(')[0].split('[')[0].split('.')[-1] def _format_code(self, lines): return lines def _get_comment(self, text): return", "does not handle setting mpmath.mp.dps. It is assumed that #", "_print_MatrixBase(self, expr): name = expr.__class__.__name__ func = self.known_functions.get(name, name) return", "'atan': 'arctan', 'atan2': 'arctan2', 'atanh': 'arctanh', 'exp2': 'exp2', }) class", "'atanh': 'arctanh', 'exp2': 'exp2', }) class NumPyPrinter(PythonCodePrinter): \"\"\" Numpy printer", "'!=' :'not_equal', '<' :'less', '<=' :'less_equal', '>' :'greater', '>=' :'greater_equal',", "_format_code(self, lines): return lines def _get_comment(self, text): return \" #", "supports # tuples in nopython mode. 
return '({},)'.format(delimiter.join(self._print(item) for item", "= \\ _print_MutableDenseMatrix = \\ _print_ImmutableMatrix = \\ _print_ImmutableDenseMatrix =", "'for {i} in range({a}, {b}+1)'.format( i=self._print(i), a=self._print(a), b=self._print(b)) for i,", "\"_mpmathcode\" _kf = dict(chain( _known_functions.items(), [(k, 'mpmath.' + v) for", "is assumed that # the caller of the lambdified function", "result.append(') else (') i += 1 result = result[:-1] result.append(')", "in _known_constants_math.items()} _operators = {'and': 'and', 'or': 'or', 'not': 'not'}", "that # the caller of the lambdified function will have", "# DotProduct allows any shape order, but numpy.dot does matrix", "it, we would still have to define our # own", "% (self._module_format('numpy.imag', self._print(expr.args[0]))) def _print_Mod(self, expr): return \"%s(%s)\" % (self._module_format('numpy.mod'),", "_not_in_mpmath = 'log1p log2'.split() _in_mpmath = [(k, v) for k,", "def _print_seq(self, seq, delimiter=', '): \"General sequence printer: converts to", "{})) self.known_constants = dict(self._kc, **(settings or {}).get( 'user_constants', {})) def", "sympy.core import sympify from .precedence import precedence from .codeprinter import", "from functools import wraps from itertools import chain from sympy.core", "override LambdaPrinter because it uses Python 'not' keyword. # If", "# Not used from ``math``: [copysign isclose isfinite isinf isnan", "= { 'Abs': 'abs', } _known_functions_math = { 'acos': 'acos',", "# version of the function and add 'logical_or' to NUMPY_TRANSLATIONS.", "arg2.T return \"%s(%s, %s)\" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2)) def _print_Piecewise(self,", "in expr.args)) def _print_Or(self, expr): \"Logical Or printer\" # We", "'Sqrt': 'sqrt', 'tan': 'tan', 'tanh': 'tanh' } # Not used", "**{ 'acos': 'arccos', 'acosh': 'arccosh', 'asin': 'arcsin', 'asinh': 'arcsinh', 'atan':", "('.' 
if mod else '') + expr.func.__name__), ', '.join(map(self._print, expr.args)))", "self._print(expr.lhs) rhs = self._print(expr.rhs) return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]), lhs=lhs, rhs=rhs) return", "arg1 = arg1.T if arg2.shape[1] != 1: arg2 = arg2.T", "\"\"\" Lambda printer for mpmath which maintains precision for floats", "'GoldenRatio': 'golden_ratio' } class SciPyPrinter(NumPyPrinter): _kf = dict(chain( NumPyPrinter._kf.items(), [(k,", "\\ _print_ImmutableMatrix = \\ _print_ImmutableDenseMatrix = \\ lambda self, expr:" ]
[ "model = self.args.model assert model in self.model_list, \"model is not", "model to process\") parser.add_argument(\"--task\", \"-t\", required=True, help=\"training/testing\") return parser.parse_args() def", "configuration file(json type)\") parser.add_argument(\"--model\", \"-m\", required=True, help=\"the model to process\")", "\"-c\", required=True, help=\"the path of a configuration file(json type)\") parser.add_argument(\"--model\",", "self.validate_configuration_path() self.validate_model() self.validate_task() def validate_task(self): task = self.args.task assert task", "\"configuration path is inappropriate (not found file)\" def get_configuraiton_file_path(self): return", "inappropriate (not found file)\" def get_configuraiton_file_path(self): return self.args.configuration def get_model_type(self):", "assert model in self.model_list, \"model is not in the prepared", "required=True, help=\"the path of a configuration file(json type)\") parser.add_argument(\"--model\", \"-m\",", "found file)\" def get_configuraiton_file_path(self): return self.args.configuration def get_model_type(self): return self.args.model", "model in self.model_list, \"model is not in the prepared model", "model_list self.args = self.get_input_arguments() self.validate_arguments() def get_input_arguments(self): parser = argparse.ArgumentParser(description='Process", "help=\"the path of a configuration file(json type)\") parser.add_argument(\"--model\", \"-m\", required=True,", "self.get_input_arguments() self.validate_arguments() def get_input_arguments(self): parser = argparse.ArgumentParser(description='Process some integers.') parser.add_argument(\"--configuration\",", "self.args = self.get_input_arguments() self.validate_arguments() def get_input_arguments(self): parser = argparse.ArgumentParser(description='Process some", "assert os.path.exists(config_path), \"configuration path is inappropriate (not found file)\" def", "parser = argparse.ArgumentParser(description='Process 
some integers.') parser.add_argument(\"--configuration\", \"-c\", required=True, help=\"the path", "= self.args.task assert task == \"training\" or task == \"testing\",", "self.args.configuration assert os.path.exists(config_path), \"configuration path is inappropriate (not found file)\"", "argparse.ArgumentParser(description='Process some integers.') parser.add_argument(\"--configuration\", \"-c\", required=True, help=\"the path of a", "help=\"the model to process\") parser.add_argument(\"--task\", \"-t\", required=True, help=\"training/testing\") return parser.parse_args()", "process\") parser.add_argument(\"--task\", \"-t\", required=True, help=\"training/testing\") return parser.parse_args() def validate_arguments(self): self.validate_configuration_path()", "in the prepared model list\" def validate_configuration_path(self): config_path = self.args.configuration", "required=True, help=\"training/testing\") return parser.parse_args() def validate_arguments(self): self.validate_configuration_path() self.validate_model() self.validate_task() def", "\"task should be training or testing\" def validate_model(self): model =", "model list\" def validate_configuration_path(self): config_path = self.args.configuration assert os.path.exists(config_path), \"configuration", "= self.args.configuration assert os.path.exists(config_path), \"configuration path is inappropriate (not found", "self.args.model assert model in self.model_list, \"model is not in the", "to process\") parser.add_argument(\"--task\", \"-t\", required=True, help=\"training/testing\") return parser.parse_args() def validate_arguments(self):", "parser.add_argument(\"--task\", \"-t\", required=True, help=\"training/testing\") return parser.parse_args() def validate_arguments(self): self.validate_configuration_path() self.validate_model()", "a configuration file(json type)\") parser.add_argument(\"--model\", \"-m\", required=True, help=\"the model to", "task = self.args.task assert task == \"training\" or 
task ==", "required=True, help=\"the model to process\") parser.add_argument(\"--task\", \"-t\", required=True, help=\"training/testing\") return", "in self.model_list, \"model is not in the prepared model list\"", "the prepared model list\" def validate_configuration_path(self): config_path = self.args.configuration assert", "def validate_configuration_path(self): config_path = self.args.configuration assert os.path.exists(config_path), \"configuration path is", "os.path.exists(config_path), \"configuration path is inappropriate (not found file)\" def get_configuraiton_file_path(self):", "__init__(self, model_list): self.model_list = model_list self.args = self.get_input_arguments() self.validate_arguments() def", "type)\") parser.add_argument(\"--model\", \"-m\", required=True, help=\"the model to process\") parser.add_argument(\"--task\", \"-t\",", "def get_input_arguments(self): parser = argparse.ArgumentParser(description='Process some integers.') parser.add_argument(\"--configuration\", \"-c\", required=True,", "os class ArgumentManager: def __init__(self, model_list): self.model_list = model_list self.args", "config_path = self.args.configuration assert os.path.exists(config_path), \"configuration path is inappropriate (not", "or testing\" def validate_model(self): model = self.args.model assert model in", "should be training or testing\" def validate_model(self): model = self.args.model", "integers.') parser.add_argument(\"--configuration\", \"-c\", required=True, help=\"the path of a configuration file(json", "assert task == \"training\" or task == \"testing\", \"task should", "== \"testing\", \"task should be training or testing\" def validate_model(self):", "self.validate_arguments() def get_input_arguments(self): parser = argparse.ArgumentParser(description='Process some integers.') parser.add_argument(\"--configuration\", \"-c\",", "def validate_model(self): model = self.args.model assert model in self.model_list, \"model", 
"parser.add_argument(\"--model\", \"-m\", required=True, help=\"the model to process\") parser.add_argument(\"--task\", \"-t\", required=True,", "argparse, os class ArgumentManager: def __init__(self, model_list): self.model_list = model_list", "self.validate_task() def validate_task(self): task = self.args.task assert task == \"training\"", "self.validate_model() self.validate_task() def validate_task(self): task = self.args.task assert task ==", "\"model is not in the prepared model list\" def validate_configuration_path(self):", "help=\"training/testing\") return parser.parse_args() def validate_arguments(self): self.validate_configuration_path() self.validate_model() self.validate_task() def validate_task(self):", "class ArgumentManager: def __init__(self, model_list): self.model_list = model_list self.args =", "ArgumentManager: def __init__(self, model_list): self.model_list = model_list self.args = self.get_input_arguments()", "model_list): self.model_list = model_list self.args = self.get_input_arguments() self.validate_arguments() def get_input_arguments(self):", "some integers.') parser.add_argument(\"--configuration\", \"-c\", required=True, help=\"the path of a configuration", "file(json type)\") parser.add_argument(\"--model\", \"-m\", required=True, help=\"the model to process\") parser.add_argument(\"--task\",", "validate_model(self): model = self.args.model assert model in self.model_list, \"model is", "path of a configuration file(json type)\") parser.add_argument(\"--model\", \"-m\", required=True, help=\"the", "return self.args.configuration def get_model_type(self): return self.args.model def get_task_type(self): return self.args.task", "def validate_task(self): task = self.args.task assert task == \"training\" or", "\"-m\", required=True, help=\"the model to process\") parser.add_argument(\"--task\", \"-t\", required=True, help=\"training/testing\")", "\"testing\", \"task should be training or testing\" def validate_model(self): model", "return 
parser.parse_args() def validate_arguments(self): self.validate_configuration_path() self.validate_model() self.validate_task() def validate_task(self): task", "validate_task(self): task = self.args.task assert task == \"training\" or task", "<gh_stars>0 import argparse, os class ArgumentManager: def __init__(self, model_list): self.model_list", "file)\" def get_configuraiton_file_path(self): return self.args.configuration def get_model_type(self): return self.args.model def", "(not found file)\" def get_configuraiton_file_path(self): return self.args.configuration def get_model_type(self): return", "is not in the prepared model list\" def validate_configuration_path(self): config_path", "be training or testing\" def validate_model(self): model = self.args.model assert", "= self.args.model assert model in self.model_list, \"model is not in", "self.args.task assert task == \"training\" or task == \"testing\", \"task", "\"-t\", required=True, help=\"training/testing\") return parser.parse_args() def validate_arguments(self): self.validate_configuration_path() self.validate_model() self.validate_task()", "testing\" def validate_model(self): model = self.args.model assert model in self.model_list,", "or task == \"testing\", \"task should be training or testing\"", "self.model_list, \"model is not in the prepared model list\" def", "training or testing\" def validate_model(self): model = self.args.model assert model", "validate_arguments(self): self.validate_configuration_path() self.validate_model() self.validate_task() def validate_task(self): task = self.args.task assert", "path is inappropriate (not found file)\" def get_configuraiton_file_path(self): return self.args.configuration", "def get_configuraiton_file_path(self): return self.args.configuration def get_model_type(self): return self.args.model def get_task_type(self):", "of a configuration file(json type)\") parser.add_argument(\"--model\", \"-m\", required=True, help=\"the model", "== \"training\" or task 
== \"testing\", \"task should be training", "def __init__(self, model_list): self.model_list = model_list self.args = self.get_input_arguments() self.validate_arguments()", "list\" def validate_configuration_path(self): config_path = self.args.configuration assert os.path.exists(config_path), \"configuration path", "\"training\" or task == \"testing\", \"task should be training or", "parser.add_argument(\"--configuration\", \"-c\", required=True, help=\"the path of a configuration file(json type)\")", "validate_configuration_path(self): config_path = self.args.configuration assert os.path.exists(config_path), \"configuration path is inappropriate", "is inappropriate (not found file)\" def get_configuraiton_file_path(self): return self.args.configuration def", "get_input_arguments(self): parser = argparse.ArgumentParser(description='Process some integers.') parser.add_argument(\"--configuration\", \"-c\", required=True, help=\"the", "= argparse.ArgumentParser(description='Process some integers.') parser.add_argument(\"--configuration\", \"-c\", required=True, help=\"the path of", "import argparse, os class ArgumentManager: def __init__(self, model_list): self.model_list =", "parser.parse_args() def validate_arguments(self): self.validate_configuration_path() self.validate_model() self.validate_task() def validate_task(self): task =", "= model_list self.args = self.get_input_arguments() self.validate_arguments() def get_input_arguments(self): parser =", "def validate_arguments(self): self.validate_configuration_path() self.validate_model() self.validate_task() def validate_task(self): task = self.args.task", "task == \"testing\", \"task should be training or testing\" def", "prepared model list\" def validate_configuration_path(self): config_path = self.args.configuration assert os.path.exists(config_path),", "get_configuraiton_file_path(self): return self.args.configuration def get_model_type(self): return self.args.model def get_task_type(self): return", "task == 
\"training\" or task == \"testing\", \"task should be", "self.model_list = model_list self.args = self.get_input_arguments() self.validate_arguments() def get_input_arguments(self): parser", "= self.get_input_arguments() self.validate_arguments() def get_input_arguments(self): parser = argparse.ArgumentParser(description='Process some integers.')", "not in the prepared model list\" def validate_configuration_path(self): config_path =" ]
[ "circulantSize) ): bLeft = hexToCirculant(lines[2 * i], circulantSize) bRight =", "01/12/2020, need to make sure this doesn't break anything. import", "figureName = str(file)) else: pass return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity)", "pathlib projectDir = pathlib.Path(__file__).parent.absolute() ## <NAME>: added on 01/12/2020, need", "n = NIBBLE_CONVERTER.dot(inputArray) if n == 10: h = 'A'", "True, isHex = True, isGenerator = True ): # This", "circulantSize: binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray =", "np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '8': nibble =", "line in the file contains the non zero locations of", "non zero locations of the first row of a circulant.", "as np from scipy.linalg import circulant #import matplotlib.pyplot as plt", "in range(8176): # for j in range(1022): # if nearEarthParity[j,i]", "NIBBLE_CONVERTER = np.array([8, 4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) def", "of opposite node sets #for i in range(8176): # for", "= n // circulantSize hexName = '' for r in", "inputArray[4 * j : 4 * j + 4] h", "'C': nibble = np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "// circulantSize N = n // circulantSize if fileName ==", "str(M) + '_' + str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath", "'', figureNumber = i, figureName = str(file)) else: pass return", "\" + fileName) workspaceDict = {} workspaceDict['parityMatrix'] = parityMatrix workspaceDict['fileName']", "i)) for i, n in enumerate(X) ) #pos.update( (n, (2,", "= scatterITR workspaceDict['averageSnrAxis'] = averageSnrAxis workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations workspaceDict['evaluationTime'] =", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'E': nibble = np.array([1,1,1,0],", "== 'F': nibble = np.array([1,1,1,1], dtype = 
GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error,", "== 'A': nibble = np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "str) outputString = '' for j in range(d1//4): nibble =", "== '8': nibble = np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "'4': nibble = np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "circulant #import matplotlib.pyplot as plt import scipy.io import common import", "bRight = hexToCirculant(lines[2 * i + 1], circulantSize) newBlock =", "circulantSize N = n // circulantSize if fileName == None:", "in the file then defines a circulant, and the order", "nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary = np.hstack((outputBinary, nibble))", "= readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False) #import networkx", "binaryArray = hexStringToBinaryArray(hexStr) if len(binaryArray) < circulantSize: binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray),", "h return outputArray, outputString def hexStringToBinaryArray(hexString): outputBinary = np.array([], dtype", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'F': nibble = np.array([1,1,1,1],", "accumulatedBlock = np.vstack((accumulatedBlock, newBlock)) newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE),", "outputString = '' for j in range(d1//4): nibble = inputArray[4", "None): print(\"*** in saveCodeInstance ...\") m, n = parityMatrix.shape M", "= parityMatrix.shape M = m // circulantSize N = n", "'2': nibble = np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "dict() #pos.update( (n, (1, i)) for i, n in enumerate(X)", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '6': nibble = np.array([0,1,1,0],", "== '2': nibble = np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "of this file to parse it. 
## The emphasis here", "'3': nibble = np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "newCirculant.T return newCirculant def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow =", "networkx as nx #from networkx.algorithms import bipartite #B = nx.Graph()", "the file contains the non zero locations of the first", "scipy.linalg import circulant #import matplotlib.pyplot as plt import scipy.io import", "-*- \"\"\" Created on Thu Nov 28 12:10:11 2019 @author:", "for k in range(N): nextLine = np.hstack((leftPadding, binaryMatrix[ r *", "+ '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath = path + fileNameSHA224 else:", "#print('Error, 0-9 or A-F') pass nibble = np.array([], dtype =", "8176, 511, True, False, False) return 'OK' def plotResults(path, makeMat", "is top to bottom left to right, i.e.: # line", "pdf were copied manually to a txt file, and it", "i == 'E': nibble = np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "np.array(4 - (circulantSize % 4)) m,n = binaryMatrix.shape #print(m) #print(n)", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '8': nibble = np.array([1,0,0,0],", "= np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] = 1 newCirculant =", "% circulantSize == 0) assert (n % circulantSize == 0)", "n == 10: h = 'A' elif n== 11: h", "0,0 with open(fileName) as fid: lines = fid.readlines() if isGenerator:", "path + fileNameSHA224 else: fileNameWithPath = path + fileName print(\"***", "nibble = np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '1':", "snrActual = mat['averageSnrAxis'] if len(snrAxis) < 3: evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)]", "0, numberOfNonZero = 0, fileName = None): print(\"*** in saveCodeInstance", "def hotLocationsToCirculant(locationList, circulantSize): generatingVector = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE) 
generatingVector[locationList]", "mat['berData'] if ('evaluationTime' in mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations = mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1])", "range(d1//4): nibble = inputArray[4 * j : 4 * j", "'7': nibble = np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'B': nibble = np.array([1,0,1,1], dtype", "// circulantSize N = n // circulantSize hexName = ''", "= scatterBER workspaceDict['itrData'] = scatterITR workspaceDict['averageSnrAxis'] = averageSnrAxis workspaceDict['averageNumberOfIterations'] =", "workspaceDict['averageSnrAxis'] = averageSnrAxis workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations workspaceDict['evaluationTime'] = evaluationTime workspaceDict['nonZero']", "to bottom left to right, i.e.: # line 0 defines", "= evaluationTime workspaceDict['nonZero'] = numberOfNonZero scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict) #evaluationData.plotStats(codewordSize,", "str(circulantSize) + '_' + str(M) + '_' + str(N) +", "i == '6': nibble = np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "locations of the first row of a circulant. # Each", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '5': nibble = np.array([0,1,0,1], dtype =", "as fid: lines = fid.readlines() if isGenerator: for i in", "= GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else: for i in range((dim1 // circulantSize)):", "generatingVector = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] = 1 newCirculant", "averageNumberOfIterations = mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel", "order in which they are defined is top to bottom", "% 4 == 0) outputArray = np.zeros(d1//4, dtype = str)", "this doesn't break anything. 
import sys sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE =", "= i + 1 mat = scipy.io.loadmat(str(os.path.join(root, file))) snrAxis =", "workspaceDict['itrData'] = scatterITR workspaceDict['averageSnrAxis'] = averageSnrAxis workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations workspaceDict['evaluationTime']", "averageNumberOfIterations workspaceDict['evaluationTime'] = evaluationTime workspaceDict['nonZero'] = numberOfNonZero scipy.io.savemat((fileNameWithPath + '.mat'),", "outputString def hexStringToBinaryArray(hexString): outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) for", "= outputString + h return outputArray, outputString def hexStringToBinaryArray(hexString): outputBinary", "matrices will be saved in either json or some matrix", "import common import hashlib import os projectDir = os.environ.get('LDPC') if", "'5': nibble = np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "n== 11: h = 'B' elif n== 12: h =", "+ 1) * circulantSize])) hexArray, hexString = binaryArraytoHex(nextLine) hexName =", "str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath = path + fileNameSHA224", "newCirculant def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow = True, isHex", "= None, path = None, evaluationTime = 0, numberOfNonZero =", "k * circulantSize : (k + 1) * circulantSize])) hexArray,", "= None, inputLabel = '', figureNumber = i, figureName =", "== '3': nibble = np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "= list(lines[ i].rstrip('\\n').split(',')) locationList1 = list(map(int, locationList1)) upBlock = hotLocationsToCirculant(locationList1,", "else: accumulatedBlock = np.vstack((accumulatedBlock, newBlock)) newMatrix = np.hstack((np.eye(dim0, dtype =", "i + 1 mat = scipy.io.loadmat(str(os.path.join(root, file))) snrAxis = mat['snrAxis']", "isRow = True, isHex = True, isGenerator = True ):", "os projectDir = 
os.environ.get('LDPC') if projectDir == None: import pathlib", "GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray = binaryArray[1:] circulantMatrix = circulant(binaryArray) circulantMatrix =", "circulantMatrix = circulantMatrix.T return circulantMatrix def hotLocationsToCirculant(locationList, circulantSize): generatingVector =", "if nearEarthParity[j,i] != 0: # B.add_edges_from([(j, 7156 + i)]) #X,", "i == '4': nibble = np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "circulantSize])) hexArray, hexString = binaryArraytoHex(nextLine) hexName = hexName + hexString", "= str(circulantSize) + '_' + str(M) + '_' + str(N)", "Each line in the file then defines a circulant, and", "m // circulantSize N = n // circulantSize hexName =", "elif n== 14: h = 'E' elif n== 15: h", "hexStringToBinaryArray(hexString): outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) for i in", "np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return newMatrix def binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding = np.array(4", "'' for j in range(d1//4): nibble = inputArray[4 * j", "not see a reason to generalise this file, since matrices", "be saved in either json or some matrix friendly format.", ": (k + 1) * circulantSize])) hexArray, hexString = binaryArraytoHex(nextLine)", "snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2() workspaceDict['snrData'] = scatterSNR workspaceDict['berData']", "i == 'C': nibble = np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "os.walk(path): for file in files: if str(file).endswith('.mat'): i = i", "pathlib.Path(__file__).parent.absolute() ## <NAME>: added on 01/12/2020, need to make sure", "to parse it. 
## The emphasis here is on correctness,", "= circulant(generatingVector) newCirculant = newCirculant.T return newCirculant def readMatrixFromFile(fileName, dim0,", "<NAME>: added on 01/12/2020, need to make sure this doesn't", "+ 1], circulantSize) newBlock = np.hstack((bLeft, bRight)) if i ==", "= binaryArraytoHex(nextLine) hexName = hexName + hexString return hexName def", "evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1 berAxis = mat['berData'] if ('evaluationTime'", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '4': nibble = np.array([0,1,0,0], dtype =", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'B': nibble = np.array([1,0,1,1], dtype =", "== None: import pathlib projectDir = pathlib.Path(__file__).parent.absolute() ## <NAME>: added", "i in range((dim0 // circulantSize) ): bLeft = hexToCirculant(lines[2 *", "== 0) assert (n % circulantSize == 0) M =", "hexName = hexName + hexString return hexName def saveCodeInstance(parityMatrix, circulantSize,", "range((dim1 // circulantSize)): locationList1 = list(lines[ i].rstrip('\\n').split(',')) locationList1 = list(map(int,", "locationList1 = list(lines[ i].rstrip('\\n').split(',')) locationList1 = list(map(int, locationList1)) upBlock =", "nibble = inputArray[4 * j : 4 * j +", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '1': nibble = np.array([0,0,0,1], dtype", "np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'F': nibble =", "zero locations of the first row of a circulant. 
#", "Add edges only between nodes of opposite node sets #for", "12: h = 'C' elif n== 13: h = 'D'", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'D': nibble = np.array([1,1,0,1], dtype =", "10 evaluationFaildAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE) evalTimes = []", "1 mat = scipy.io.loadmat(str(os.path.join(root, file))) snrAxis = mat['snrAxis'] snrActual =", "import sys sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE = np.int32 GENERAL_CODE_MATRIX_DATA_TYPE = np.int32", "was initially intended purely to generate the matrices for the", "dtype = FILE_HANDLER_INT_DATA_TYPE) evalTimes = [] numberOfIterationsAtHigh = [] for", "else: accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock)) for i in range((dim1 //", "4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray): n =", "outputString + h return outputArray, outputString def hexStringToBinaryArray(hexString): outputBinary =", "i], circulantSize) bRight = hexToCirculant(lines[2 * i + 1], circulantSize)", "open(fileName) as fid: lines = fid.readlines() if isGenerator: for i", "+ i].rstrip('\\n').split(',')) locationList = list(map(int, locationList)) newBlock = hotLocationsToCirculant(locationList, circulantSize)", "files: if str(file).endswith('.mat'): i = i + 1 mat =", "hashlib import os projectDir = os.environ.get('LDPC') if projectDir == None:", "workspaceDict['nonZero'] = numberOfNonZero scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"***", "#evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"*** Finishing saveCodeInstance !\") return fileName def testFileHandler():", "np from scipy.linalg import circulant #import matplotlib.pyplot as plt import", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '5': nibble = np.array([0,1,0,1], dtype", "): # This function assumes that each line in the", "range(M): for k in range(N): nextLine = np.hstack((leftPadding, binaryMatrix[ r", "if 
projectDir == None: import pathlib projectDir = pathlib.Path(__file__).parent.absolute() ##", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'A': nibble = np.array([1,0,1,0], dtype =", "= np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE) evalTimes = [] numberOfIterationsAtHigh =", "True ): # This function assumes that each line in", "511, True, False, False) #import networkx as nx #from networkx.algorithms", "print(\"*** Finishing saveCodeInstance !\") return fileName def testFileHandler(): nearEarthGenerator =", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) for i in hexString: if i ==", "common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel = '', figureNumber =", "to a txt file, and it is the purpose of", "n== 13: h = 'D' elif n== 14: h =", "def binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding = np.array(4 - (circulantSize % 4))", "j + 4] h = nibbleToHex(nibble) outputArray[j] = h outputString", "anything. import sys sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE = np.int32 GENERAL_CODE_MATRIX_DATA_TYPE =", "elif i == 'A': nibble = np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "elif i == 'E': nibble = np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else: for i in range((dim1", "// circulantSize) + i].rstrip('\\n').split(',')) locationList = list(map(int, locationList)) newBlock =", "= np.zeros(d1//4, dtype = str) outputString = '' for j", "False, False) #import networkx as nx #from networkx.algorithms import bipartite", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '3': nibble = np.array([0,0,1,1], dtype", "np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '4': nibble =", "to right, i.e.: # line 0 defines circulant 0,0 with", "This file was initially intended purely to generate the matrices", "+ '.mat'), workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"*** Finishing 
saveCodeInstance !\") return", "break anything. import sys sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE = np.int32 GENERAL_CODE_MATRIX_DATA_TYPE", "len(binaryArray) < circulantSize: binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE)) else:", "function assumes that each line in the file contains the", "added on 01/12/2020, need to make sure this doesn't break", "np.hstack((accumulatedUpBlock1, upBlock)) for i in range((dim1 // circulantSize)): locationList =", "#nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False) #import", "circulantSize) if i == 0: accumulatedBlock2 = newBlock else: accumulatedBlock2", "#import networkx as nx #from networkx.algorithms import bipartite #B =", "outputString = outputString + h return outputArray, outputString def hexStringToBinaryArray(hexString):", "## <NAME>: added on 01/12/2020, need to make sure this", "return outputBinary def hexToCirculant(hexStr, circulantSize): binaryArray = hexStringToBinaryArray(hexStr) if len(binaryArray)", "the matrices for the near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'A': nibble = np.array([1,0,1,0], dtype", "[] for root, dirs, files in os.walk(path): for file in", "= binaryArray[1:] circulantMatrix = circulant(binaryArray) circulantMatrix = circulantMatrix.T return circulantMatrix", "pass return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity) #nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022,", "i].rstrip('\\n').split(',')) locationList1 = list(map(int, locationList1)) upBlock = hotLocationsToCirculant(locationList1, circulantSize) if", "in range(N): nextLine = np.hstack((leftPadding, binaryMatrix[ r * circulantSize ,", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '8': nibble = np.array([1,0,0,0], 
dtype =", "if i == 0: accumulatedBlock = newBlock else: accumulatedBlock =", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '5': nibble = np.array([0,1,0,1],", "= scipy.io.loadmat(str(os.path.join(root, file))) snrAxis = mat['snrAxis'] snrActual = mat['averageSnrAxis'] if", "newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return newMatrix def binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding", "or A-F') pass nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary", "= np.hstack((bLeft, bRight)) if i == 0: accumulatedBlock = newBlock", "#B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1) # Add edges only between", "= path + fileNameSHA224 else: fileNameWithPath = path + fileName", "= [] numberOfIterationsAtHigh = [] for root, dirs, files in", "lines = fid.readlines() if isGenerator: for i in range((dim0 //", "return h def binaryArraytoHex(inputArray): d1 = len(inputArray) assert (d1 %", "elif n== 13: h = 'D' elif n== 14: h", "nibble = np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'B':", "nibble = np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'C':", "False) #import networkx as nx #from networkx.algorithms import bipartite #B", "Y = bipartite.sets(B) #pos = dict() #pos.update( (n, (1, i))", "earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The values from the", "'C' elif n== 13: h = 'D' elif n== 14:", "= str(n) return h def binaryArraytoHex(inputArray): d1 = len(inputArray) assert", "= circulant(binaryArray) circulantMatrix = circulantMatrix.T return circulantMatrix def hotLocationsToCirculant(locationList, circulantSize):", "# for j in range(1022): # if nearEarthParity[j,i] != 0:", "B.add_edges_from([(j, 7156 + i)]) #X, Y = bipartite.sets(B) #pos =", "n== 12: h = 'C' elif n== 13: h =", "if i == 0: accumulatedUpBlock1 = upBlock else: accumulatedUpBlock1 =", "## The emphasis here is on correctness, I currently do", "will be saved in either json or 
some matrix friendly", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '1': nibble = np.array([0,0,0,1], dtype =", "i].rstrip('\\n').split(',')) locationList = list(map(int, locationList)) newBlock = hotLocationsToCirculant(locationList, circulantSize) if", "(n % circulantSize == 0) M = m // circulantSize", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'D': nibble = np.array([1,1,0,1],", "'' for r in range(M): for k in range(N): nextLine", "n== 15: h = 'F' else: h = str(n) return", "the near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The values", "if n == 10: h = 'A' elif n== 11:", "8176, 511, True, True, True) nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt',", "[] numberOfIterationsAtHigh = [] for root, dirs, files in os.walk(path):", "saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None, path = None, evaluationTime", "plotResults(path, makeMat = False): i = 10 evaluationFaildAt = np.zeros(4,", "initially intended purely to generate the matrices for the near", "elif i == '5': nibble = np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "= np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '6': nibble", "= newBlock else: accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock)) newMatrix = np.vstack((accumulatedUpBlock1,", "newBlock else: accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock)) newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2))", "on Thu Nov 28 12:10:11 2019 @author: Omer \"\"\" ##", "= np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '4': nibble", "circulantSize) if i == 0: accumulatedUpBlock1 = upBlock else: accumulatedUpBlock1", "= np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'E': nibble", "11: h = 'B' elif n== 12: h = 'C'", "list(lines[(dim1 // circulantSize) + i].rstrip('\\n').split(',')) locationList = list(map(int, locationList)) newBlock", 
"GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error, 0-9 or A-F') pass nibble = np.array([],", "circulantSize): binaryArray = hexStringToBinaryArray(hexStr) if len(binaryArray) < circulantSize: binaryArray =", "0) assert (n % circulantSize == 0) M = m", "i == '8': nibble = np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "friendly format. import numpy as np from scipy.linalg import circulant", "= list(map(int, locationList1)) upBlock = hotLocationsToCirculant(locationList1, circulantSize) if i ==", "= path + fileName print(\"*** \" + fileName) workspaceDict =", "berAxis = mat['berData'] if ('evaluationTime' in mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations =", "#pos = dict() #pos.update( (n, (1, i)) for i, n", "0: # B.add_edges_from([(j, 7156 + i)]) #X, Y = bipartite.sets(B)", "file))) snrAxis = mat['snrAxis'] snrActual = mat['averageSnrAxis'] if len(snrAxis) <", "import pathlib projectDir = pathlib.Path(__file__).parent.absolute() ## <NAME>: added on 01/12/2020,", "workspaceDict['fileName'] = fileName if evaluationData != None: scatterSNR, scatterBER, scatterITR,", "# Add edges only between nodes of opposite node sets", "def hexStringToBinaryArray(hexString): outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) for i", "i = 10 evaluationFaildAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE) evalTimes", "'D': nibble = np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "7154, 8176, 511, True, True, True) nearEarthParity = readMatrixFromFile(projectDir +", "circulantSize == 0) M = m // circulantSize N =", "circulantSize hexName = '' for r in range(M): for k", "elif n== 11: h = 'B' elif n== 12: h", "!= None: scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations =", "'E': nibble = np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "True, False, False) #import networkx as nx #from networkx.algorithms import", 
"circulantMatrix.T return circulantMatrix def hotLocationsToCirculant(locationList, circulantSize): generatingVector = np.zeros(circulantSize, dtype", "the non zero locations of the first row of a", "dtype = str) outputString = '' for j in range(d1//4):", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'A': nibble = np.array([1,0,1,0],", "nibble = np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'D':", "if isGenerator: for i in range((dim0 // circulantSize) ): bLeft", "< circulantSize: binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray", "elif i == '3': nibble = np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "== 'C': nibble = np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "return outputArray, outputString def hexStringToBinaryArray(hexString): outputBinary = np.array([], dtype =", "// circulantSize)): locationList1 = list(lines[ i].rstrip('\\n').split(',')) locationList1 = list(map(int, locationList1))", "readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False) #import networkx as", "'.mat'), workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"*** Finishing saveCodeInstance !\") return fileName", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '3': nibble = np.array([0,0,1,1],", "circulantMatrix = circulant(binaryArray) circulantMatrix = circulantMatrix.T return circulantMatrix def hotLocationsToCirculant(locationList,", "(circulantSize % 4)) m,n = binaryMatrix.shape #print(m) #print(n) assert( m", "i in range((dim1 // circulantSize)): locationList = list(lines[(dim1 // circulantSize)", "np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'B': nibble =", "= None, evaluationTime = 0, numberOfNonZero = 0, fileName =", "* circulantSize])) hexArray, hexString = binaryArraytoHex(nextLine) hexName = hexName +", "return fileName def 
testFileHandler(): nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154,", "0) outputArray = np.zeros(d1//4, dtype = str) outputString = ''", "h = nibbleToHex(nibble) outputArray[j] = h outputString = outputString +", "== '6': nibble = np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "file to parse it. ## The emphasis here is on", "i == 0: accumulatedUpBlock1 = upBlock else: accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1,", "N = n // circulantSize hexName = '' for r", "file in files: if str(file).endswith('.mat'): i = i + 1", "node sets #for i in range(8176): # for j in", "nibble = np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error, 0-9 or", "range((dim1 // circulantSize)): locationList = list(lines[(dim1 // circulantSize) + i].rstrip('\\n').split(','))", "readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True, True) nearEarthParity", "i in range(8176): # for j in range(1022): # if", "True, True) nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511,", "and it is the purpose of this file to parse", "'F': nibble = np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error, 0-9", "The values from the above pdf were copied manually to", "i == '9': nibble = np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '6': nibble =", "import hashlib import os projectDir = os.environ.get('LDPC') if projectDir ==", "= np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '8': nibble", "from the above pdf were copied manually to a txt", "which they are defined is top to bottom left to", "upBlock else: accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock)) for i in range((dim1", "return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity) 
#nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176,", "#pos.update( (n, (2, i)) for i, n in enumerate(Y) )", "np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '2': nibble =", "12:10:11 2019 @author: Omer \"\"\" ## File handler ## This", "with open(fileName) as fid: lines = fid.readlines() if isGenerator: for", "networkx.algorithms import bipartite #B = nx.Graph() #B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022, 7156", "file then defines a circulant, and the order in which", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '2': nibble = np.array([0,0,1,0], dtype =", "projectDir = os.environ.get('LDPC') if projectDir == None: import pathlib projectDir", "enumerate(X) ) #pos.update( (n, (2, i)) for i, n in", "to generalise this file, since matrices will be saved in", "nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True,", "circulant(generatingVector) newCirculant = newCirculant.T return newCirculant def readMatrixFromFile(fileName, dim0, dim1,", "the file then defines a circulant, and the order in", "h = 'C' elif n== 13: h = 'D' elif", "h = 'E' elif n== 15: h = 'F' else:", "for j in range(1022): # if nearEarthParity[j,i] != 0: #", "np.zeros(d1//4, dtype = str) outputString = '' for j in", "\"\"\" Created on Thu Nov 28 12:10:11 2019 @author: Omer", "opposite node sets #for i in range(8176): # for j", "def hexToCirculant(hexStr, circulantSize): binaryArray = hexStringToBinaryArray(hexStr) if len(binaryArray) < circulantSize:", "for file in files: if str(file).endswith('.mat'): i = i +", "= np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'D': nibble", "= averageNumberOfIterations workspaceDict['evaluationTime'] = evaluationTime workspaceDict['nonZero'] = numberOfNonZero scipy.io.savemat((fileNameWithPath +", "= upBlock else: accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock)) 
for i in", "= n // circulantSize if fileName == None: fileName =", "that each line in the file contains the non zero", "in range((dim1 // circulantSize)): locationList = list(lines[(dim1 // circulantSize) +", "FILE_HANDLER_INT_DATA_TYPE) evalTimes = [] numberOfIterationsAtHigh = [] for root, dirs,", "between nodes of opposite node sets #for i in range(8176):", "nibble = np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '8':", "defined is top to bottom left to right, i.e.: #", "= NIBBLE_CONVERTER.dot(inputArray) if n == 10: h = 'A' elif", "#B = nx.Graph() #B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1)", "saved in either json or some matrix friendly format. import", "'B': nibble = np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "circulantMatrix def hotLocationsToCirculant(locationList, circulantSize): generatingVector = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "averageSnrAxis workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations workspaceDict['evaluationTime'] = evaluationTime workspaceDict['nonZero'] = numberOfNonZero", "= mat['averageSnrAxis'] if len(snrAxis) < 3: evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] +", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '9': nibble = np.array([1,0,0,1], dtype", "return circulantMatrix def hotLocationsToCirculant(locationList, circulantSize): generatingVector = np.zeros(circulantSize, dtype =", "i + 1], circulantSize) newBlock = np.hstack((bLeft, bRight)) if i", "if len(snrAxis) < 3: evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1 berAxis", "= 0, fileName = None): print(\"*** in saveCodeInstance ...\") m,", "hexStringToBinaryArray(hexStr) if len(binaryArray) < circulantSize: binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype =", "circulantSize if fileName == None: fileName = binaryMatrixToHexString(parityMatrix, circulantSize) 
fileNameSHA224", "i = i + 1 mat = scipy.io.loadmat(str(os.path.join(root, file))) snrAxis", "if len(binaryArray) < circulantSize: binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE))", "0, fileName = None): print(\"*** in saveCodeInstance ...\") m, n", "= len(inputArray) assert (d1 % 4 == 0) outputArray =", "top to bottom left to right, i.e.: # line 0", "in range((dim0 // circulantSize) ): bLeft = hexToCirculant(lines[2 * i],", "False) return 'OK' def plotResults(path, makeMat = False): i =", "= readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)", "h = 'F' else: h = str(n) return h def", "= pathlib.Path(__file__).parent.absolute() ## <NAME>: added on 01/12/2020, need to make", "hexName = '' for r in range(M): for k in", "workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"*** Finishing saveCodeInstance !\") return fileName def", "= np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'C': nibble", "matrices for the near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ##", "fileNameWithPath = path + fileName print(\"*** \" + fileName) workspaceDict", "1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray): n = NIBBLE_CONVERTER.dot(inputArray) if", "list(map(int, locationList1)) upBlock = hotLocationsToCirculant(locationList1, circulantSize) if i == 0:", "evaluationData.getStatsV2() workspaceDict['snrData'] = scatterSNR workspaceDict['berData'] = scatterBER workspaceDict['itrData'] = scatterITR", "sets #for i in range(8176): # for j in range(1022):", "intended purely to generate the matrices for the near earth", "== 'B': nibble = np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "circulant 0,0 with open(fileName) as fid: lines = fid.readlines() if", "= averageSnrAxis workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations 
workspaceDict['evaluationTime'] = evaluationTime workspaceDict['nonZero'] =", "this file to parse it. ## The emphasis here is", "as plt import scipy.io import common import hashlib import os", "i == 'B': nibble = np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "= fileName if evaluationData != None: scatterSNR, scatterBER, scatterITR, snrAxis,", "generalise this file, since matrices will be saved in either", "= mat['snrAxis'] snrActual = mat['averageSnrAxis'] if len(snrAxis) < 3: evaluationFaildAt[len(snrAxis)]", "# Each line in the file then defines a circulant,", "GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray): n = NIBBLE_CONVERTER.dot(inputArray) if n == 10:", "True, isGenerator = True ): # This function assumes that", "* i], circulantSize) bRight = hexToCirculant(lines[2 * i + 1],", "= evaluationFaildAt[len(snrAxis)] + 1 berAxis = mat['berData'] if ('evaluationTime' in", "= np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'F': nibble", "j : 4 * j + 4] h = nibbleToHex(nibble)", "= np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '7': nibble", "= 'F' else: h = str(n) return h def binaryArraytoHex(inputArray):", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'F': nibble = np.array([1,1,1,1], dtype", "None: scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2()", "+ str(M) + '_' + str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest())", "a circulant. 
# Each line in the file then defines", "i.e.: # line 0 defines circulant 0,0 with open(fileName) as", "else: h = str(n) return h def binaryArraytoHex(inputArray): d1 =", "accumulatedBlock2 = newBlock else: accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock)) newMatrix =", "fileNameSHA224 else: fileNameWithPath = path + fileName print(\"*** \" +", "(n, (1, i)) for i, n in enumerate(X) ) #pos.update(", "'8': nibble = np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "in saveCodeInstance ...\") m, n = parityMatrix.shape M = m", "for root, dirs, files in os.walk(path): for file in files:", "A-F') pass nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary =", "elif i == 'F': nibble = np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "== 0: accumulatedBlock2 = newBlock else: accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock))", "= np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '9': nibble", "evaluationTime = 0, numberOfNonZero = 0, fileName = None): print(\"***", "= np.int32 GENERAL_CODE_MATRIX_DATA_TYPE = np.int32 NIBBLE_CONVERTER = np.array([8, 4, 2,", "locationList1)) upBlock = hotLocationsToCirculant(locationList1, circulantSize) if i == 0: accumulatedUpBlock1", "line 0 defines circulant 0,0 with open(fileName) as fid: lines", "'6': nibble = np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "hexString: if i == '0': nibble = np.array([0,0,0,0], dtype =", "3: evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1 berAxis = mat['berData'] if", "do not see a reason to generalise this file, since", "= GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray): n = NIBBLE_CONVERTER.dot(inputArray) if n ==", "assert (d1 % 4 == 0) outputArray = np.zeros(d1//4, dtype", "'9': nibble = np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "binaryArray[1:] circulantMatrix = circulant(binaryArray) circulantMatrix = circulantMatrix.T 
return circulantMatrix def", "4)) m,n = binaryMatrix.shape #print(m) #print(n) assert( m % circulantSize", "evaluationFaildAt[len(snrAxis)] + 1 berAxis = mat['berData'] if ('evaluationTime' in mat.keys()):", "= hotLocationsToCirculant(locationList1, circulantSize) if i == 0: accumulatedUpBlock1 = upBlock", "GENERAL_CODE_MATRIX_DATA_TYPE = np.int32 NIBBLE_CONVERTER = np.array([8, 4, 2, 1], dtype", "outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) for i in hexString:", "'0': nibble = np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "accumulatedBlock)) else: for i in range((dim1 // circulantSize)): locationList1 =", "workspaceDict['evaluationTime'] = evaluationTime workspaceDict['nonZero'] = numberOfNonZero scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict)", "Omer \"\"\" ## File handler ## This file was initially", "2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray): n = NIBBLE_CONVERTER.dot(inputArray)", "str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath = path + fileNameSHA224 else: fileNameWithPath = path", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '4': nibble = np.array([0,1,0,0],", "+ '_' + str(M) + '_' + str(N) + '_'", "r in range(M): for k in range(N): nextLine = np.hstack((leftPadding,", "fileNameWithPath) print(\"*** Finishing saveCodeInstance !\") return fileName def testFileHandler(): nearEarthGenerator", "workspaceDict['snrData'] = scatterSNR workspaceDict['berData'] = scatterBER workspaceDict['itrData'] = scatterITR workspaceDict['averageSnrAxis']", "isHex = True, isGenerator = True ): # This function", "a circulant, and the order in which they are defined", "nibble = np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '3':", "== '5': nibble = np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '6': nibble = np.array([0,1,1,0], dtype =", "file contains the non zero 
locations of the first row", "= binaryMatrixToHexString(parityMatrix, circulantSize) fileNameSHA224 = str(circulantSize) + '_' + str(M)", "= np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray = binaryArray[1:] circulantMatrix", "GENERAL_CODE_MATRIX_DATA_TYPE) for i in hexString: if i == '0': nibble", "elif i == '2': nibble = np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "True) nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True,", "= str(file)) else: pass return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity) #nearEarthParity", "np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '5': nibble =", "np.array([8, 4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray): n", "GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary = np.hstack((outputBinary, nibble)) return outputBinary def hexToCirculant(hexStr, circulantSize):", "if fileName == None: fileName = binaryMatrixToHexString(parityMatrix, circulantSize) fileNameSHA224 =", "This function assumes that each line in the file contains", "#plt.imshow(nearEarthParity) #nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)", "circulantSize == 0) assert (n % circulantSize == 0) M", "= newBlock else: accumulatedBlock = np.vstack((accumulatedBlock, newBlock)) newMatrix = np.hstack((np.eye(dim0,", "evalTimes, evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity) #nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511,", "hexName def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None, path =", "// circulantSize if fileName == None: fileName = binaryMatrixToHexString(parityMatrix, circulantSize)", "# -*- coding: utf-8 -*- \"\"\" Created on Thu Nov", 
"nibble = np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '4':", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '2': nibble = np.array([0,0,1,0],", "np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] = 1 newCirculant = circulant(generatingVector)", "+ 4] h = nibbleToHex(nibble) outputArray[j] = h outputString =", "('evaluationTime' in mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations = mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis,", "#pos.update( (n, (1, i)) for i, n in enumerate(X) )", "2019 @author: Omer \"\"\" ## File handler ## This file", "File handler ## This file was initially intended purely to", "np.vstack((accumulatedBlock, newBlock)) newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else:", "+ str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath = path +", "= evaluationData.getStatsV2() workspaceDict['snrData'] = scatterSNR workspaceDict['berData'] = scatterBER workspaceDict['itrData'] =", "= 'A' elif n== 11: h = 'B' elif n==", "def binaryArraytoHex(inputArray): d1 = len(inputArray) assert (d1 % 4 ==", "+ 1 mat = scipy.io.loadmat(str(os.path.join(root, file))) snrAxis = mat['snrAxis'] snrActual", "in range((dim1 // circulantSize)): locationList1 = list(lines[ i].rstrip('\\n').split(',')) locationList1 =", "in enumerate(X) ) #pos.update( (n, (2, i)) for i, n", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '7': nibble = np.array([0,1,1,1],", "i == '1': nibble = np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "i == 'F': nibble = np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) else:", "if i == '0': nibble = np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "nibble = np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '9':", "= 1 
newCirculant = circulant(generatingVector) newCirculant = newCirculant.T return newCirculant", "* circulantSize : (k + 1) * circulantSize])) hexArray, hexString", ": 4 * j + 4] h = nibbleToHex(nibble) outputArray[j]", "= h outputString = outputString + h return outputArray, outputString", "code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The values from the above", "generatingVector[locationList] = 1 newCirculant = circulant(generatingVector) newCirculant = newCirculant.T return", "saveCodeInstance ...\") m, n = parityMatrix.shape M = m //", "bipartite #B = nx.Graph() #B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022, 7156 + 1022),", "the above pdf were copied manually to a txt file,", "circulantSize , k * circulantSize : (k + 1) *", "'E' elif n== 15: h = 'F' else: h =", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '4': nibble = np.array([0,1,0,0], dtype", "= np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'A': nibble", "numberOfIterationsAtHigh #plt.imshow(nearEarthParity) #nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False,", "// circulantSize hexName = '' for r in range(M): for", "since matrices will be saved in either json or some", "1], circulantSize) newBlock = np.hstack((bLeft, bRight)) if i == 0:", "i)) for i, n in enumerate(Y) ) #nx.draw(B, pos=pos) #plt.show()", "== 0: accumulatedUpBlock1 = upBlock else: accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock))", "scatterBER workspaceDict['itrData'] = scatterITR workspaceDict['averageSnrAxis'] = averageSnrAxis workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations", "fid: lines = fid.readlines() if isGenerator: for i in range((dim0", "i == 0: accumulatedBlock = newBlock else: accumulatedBlock = np.vstack((accumulatedBlock,", "from scipy.linalg import circulant #import matplotlib.pyplot as plt import scipy.io", "locationList = list(lines[(dim1 // 
circulantSize) + i].rstrip('\\n').split(',')) locationList = list(map(int,", "nibbleToHex(inputArray): n = NIBBLE_CONVERTER.dot(inputArray) if n == 10: h =", "= np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return newMatrix def binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding =", "= numberOfNonZero scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"*** Finishing", "doesn't break anything. import sys sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE = np.int32", "= GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary = np.hstack((outputBinary, nibble)) return outputBinary def hexToCirculant(hexStr,", "outputBinary def hexToCirculant(hexStr, circulantSize): binaryArray = hexStringToBinaryArray(hexStr) if len(binaryArray) <", "hotLocationsToCirculant(locationList1, circulantSize) if i == 0: accumulatedUpBlock1 = upBlock else:", "return newCirculant def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow = True,", "or some matrix friendly format. 
import numpy as np from", "13: h = 'D' elif n== 14: h = 'E'", "upBlock = hotLocationsToCirculant(locationList1, circulantSize) if i == 0: accumulatedUpBlock1 =", "== 0) M = m // circulantSize N = n", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '7': nibble = np.array([0,1,1,1], dtype =", "= hotLocationsToCirculant(locationList, circulantSize) if i == 0: accumulatedBlock2 = newBlock", "numberOfNonZero = 0, fileName = None): print(\"*** in saveCodeInstance ...\")", "= i, figureName = str(file)) else: pass return evalTimes, evaluationFaildAt,", "os.environ.get('LDPC') if projectDir == None: import pathlib projectDir = pathlib.Path(__file__).parent.absolute()", "in range(M): for k in range(N): nextLine = np.hstack((leftPadding, binaryMatrix[", "None: fileName = binaryMatrixToHexString(parityMatrix, circulantSize) fileNameSHA224 = str(circulantSize) + '_'", "readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False) return", "% circulantSize == 0) M = m // circulantSize N", "evaluationTime workspaceDict['nonZero'] = numberOfNonZero scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath)", "plt import scipy.io import common import hashlib import os projectDir", "only between nodes of opposite node sets #for i in", "nibble = np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '5':", "8176, 511, True, False, False) #import networkx as nx #from", "then defines a circulant, and the order in which they", "fileName if evaluationData != None: scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis,", "found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The values from the above pdf", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary = np.hstack((outputBinary, nibble)) return outputBinary def", "hexToCirculant(lines[2 * i + 1], circulantSize) newBlock = np.hstack((bLeft, bRight))", "print(\"*** in saveCodeInstance ...\") m, n = 
parityMatrix.shape M =", "binaryArraytoHex(nextLine) hexName = hexName + hexString return hexName def saveCodeInstance(parityMatrix,", "in hexString: if i == '0': nibble = np.array([0,0,0,0], dtype", "'_' + str(M) + '_' + str(N) + '_' +", "= 10 evaluationFaildAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE) evalTimes =", "elif i == '1': nibble = np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "inputLabel = '', figureNumber = i, figureName = str(file)) else:", "== '7': nibble = np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "= '', figureNumber = i, figureName = str(file)) else: pass", "binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding = np.array(4 - (circulantSize % 4)) m,n", "path = None, evaluationTime = 0, numberOfNonZero = 0, fileName", "the purpose of this file to parse it. ## The", "Nov 28 12:10:11 2019 @author: Omer \"\"\" ## File handler", "## File handler ## This file was initially intended purely", "accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock)) newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return newMatrix", "codewordSize, evaluationData = None, path = None, evaluationTime = 0,", "'_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath = path + fileNameSHA224 else: fileNameWithPath", "#for i in range(8176): # for j in range(1022): #", "outputArray[j] = h outputString = outputString + h return outputArray,", "np.int32 NIBBLE_CONVERTER = np.array([8, 4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "= np.hstack((leftPadding, binaryMatrix[ r * circulantSize , k * circulantSize", "evaluationData = None, path = None, evaluationTime = 0, numberOfNonZero", "4 * j + 4] h = nibbleToHex(nibble) outputArray[j] =", "+ h return outputArray, outputString def hexStringToBinaryArray(hexString): outputBinary = np.array([],", "matplotlib.pyplot as plt import scipy.io import common import hashlib import", "were copied manually to a txt file, and it is", "14: 
h = 'E' elif n== 15: h = 'F'", "matrix friendly format. import numpy as np from scipy.linalg import", "np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '7': nibble =", "hotLocationsToCirculant(locationList, circulantSize): generatingVector = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] =", "np.hstack((bLeft, bRight)) if i == 0: accumulatedBlock = newBlock else:", "if i == 0: accumulatedBlock2 = newBlock else: accumulatedBlock2 =", "else: fileNameWithPath = path + fileName print(\"*** \" + fileName)", "True, True, True) nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176,", "= os.environ.get('LDPC') if projectDir == None: import pathlib projectDir =", "isGenerator = True ): # This function assumes that each", "## The values from the above pdf were copied manually", "Created on Thu Nov 28 12:10:11 2019 @author: Omer \"\"\"", "nibble = np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'A':", "7156 + 1022), bipartite=1) # Add edges only between nodes", "= GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] = 1 newCirculant = circulant(generatingVector) newCirculant =", "circulantSize) newBlock = np.hstack((bLeft, bRight)) if i == 0: accumulatedBlock", "to make sure this doesn't break anything. 
import sys sys.path.insert(1,", "a txt file, and it is the purpose of this", "np.hstack((leftPadding, binaryMatrix[ r * circulantSize , k * circulantSize :", "mat['averageSnrAxis'] if len(snrAxis) < 3: evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1", "0 defines circulant 0,0 with open(fileName) as fid: lines =", "elif i == '9': nibble = np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "fid.readlines() if isGenerator: for i in range((dim0 // circulantSize) ):", "= parityMatrix workspaceDict['fileName'] = fileName if evaluationData != None: scatterSNR,", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '9': nibble = np.array([1,0,0,1],", "'OK' def plotResults(path, makeMat = False): i = 10 evaluationFaildAt", "accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock)) for i in range((dim1 // circulantSize)):", "elif i == 'B': nibble = np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "purely to generate the matrices for the near earth code", "else: binaryArray = binaryArray[1:] circulantMatrix = circulant(binaryArray) circulantMatrix = circulantMatrix.T", "'/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True, True) nearEarthParity = readMatrixFromFile(projectDir", "https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The values from the above pdf were copied", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else: for i in range((dim1 //", "defines a circulant, and the order in which they are", "i == 'D': nibble = np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "circulant(binaryArray) circulantMatrix = circulantMatrix.T return circulantMatrix def hotLocationsToCirculant(locationList, circulantSize): generatingVector", "i == '3': nibble = np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "0-9 or A-F') pass nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "are defined is top to bottom left to right, i.e.:", "line in the file then defines a circulant, 
and the", "fileName = None): print(\"*** in saveCodeInstance ...\") m, n =", "(1, i)) for i, n in enumerate(X) ) #pos.update( (n,", "dim0, dim1, circulantSize, isRow = True, isHex = True, isGenerator", "bRight)) if i == 0: accumulatedBlock = newBlock else: accumulatedBlock", "'1': nibble = np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "# if nearEarthParity[j,i] != 0: # B.add_edges_from([(j, 7156 + i)])", "== 0: accumulatedBlock = newBlock else: accumulatedBlock = np.vstack((accumulatedBlock, newBlock))", "np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'E': nibble =", "for r in range(M): for k in range(N): nextLine =", "need to make sure this doesn't break anything. import sys", "evaluationData != None: scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations", "nearEarthParity[j,i] != 0: # B.add_edges_from([(j, 7156 + i)]) #X, Y", "(d1 % 4 == 0) outputArray = np.zeros(d1//4, dtype =", "= np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'B': nibble", "'D' elif n== 14: h = 'E' elif n== 15:", "h outputString = outputString + h return outputArray, outputString def", "circulantSize : (k + 1) * circulantSize])) hexArray, hexString =", "elif i == '4': nibble = np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "nodes of opposite node sets #for i in range(8176): #", "file, and it is the purpose of this file to", "j in range(1022): # if nearEarthParity[j,i] != 0: # B.add_edges_from([(j,", "#B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1) # Add edges", "i == 'A': nibble = np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "== '9': nibble = np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "== None: fileName = binaryMatrixToHexString(parityMatrix, circulantSize) fileNameSHA224 = str(circulantSize) +", "'_' + str(N) + '_' + 
str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath = path", "parityMatrix workspaceDict['fileName'] = fileName if evaluationData != None: scatterSNR, scatterBER,", "= GENERAL_CODE_MATRIX_DATA_TYPE) for i in hexString: if i == '0':", "scatterITR workspaceDict['averageSnrAxis'] = averageSnrAxis workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations workspaceDict['evaluationTime'] = evaluationTime", "make sure this doesn't break anything. import sys sys.path.insert(1, projectDir)", "binaryArraytoHex(inputArray): d1 = len(inputArray) assert (d1 % 4 == 0)", "(n, (2, i)) for i, n in enumerate(Y) ) #nx.draw(B,", "projectDir == None: import pathlib projectDir = pathlib.Path(__file__).parent.absolute() ## <NAME>:", "= np.int32 NIBBLE_CONVERTER = np.array([8, 4, 2, 1], dtype =", "i in range((dim1 // circulantSize)): locationList1 = list(lines[ i].rstrip('\\n').split(',')) locationList1", "= list(map(int, locationList)) newBlock = hotLocationsToCirculant(locationList, circulantSize) if i ==", "circulantSize) fileNameSHA224 = str(circulantSize) + '_' + str(M) + '_'", "fileName) workspaceDict = {} workspaceDict['parityMatrix'] = parityMatrix workspaceDict['fileName'] = fileName", "manually to a txt file, and it is the purpose", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '8': nibble = np.array([1,0,0,0], dtype", "defines circulant 0,0 with open(fileName) as fid: lines = fid.readlines()", "sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE = np.int32 GENERAL_CODE_MATRIX_DATA_TYPE = np.int32 NIBBLE_CONVERTER =", "= np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '2': nibble", "binaryMatrix.shape #print(m) #print(n) assert( m % circulantSize == 0) assert", "= None): print(\"*** in saveCodeInstance ...\") m, n = parityMatrix.shape", "fileName = binaryMatrixToHexString(parityMatrix, circulantSize) fileNameSHA224 = str(circulantSize) + '_' +", "elif n== 12: h = 'C' elif n== 13: h", "hexToCirculant(lines[2 * i], 
circulantSize) bRight = hexToCirculant(lines[2 * i +", "upBlock)) for i in range((dim1 // circulantSize)): locationList = list(lines[(dim1", "import bipartite #B = nx.Graph() #B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022, 7156 +", "k in range(N): nextLine = np.hstack((leftPadding, binaryMatrix[ r * circulantSize", "= GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray = binaryArray[1:] circulantMatrix = circulant(binaryArray) circulantMatrix", "return newMatrix def binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding = np.array(4 - (circulantSize", "circulantSize): generatingVector = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] = 1", "near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The values from", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'C': nibble = np.array([1,1,0,0], dtype =", "readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow = True, isHex = True,", "0: accumulatedBlock = newBlock else: accumulatedBlock = np.vstack((accumulatedBlock, newBlock)) newMatrix", "outputArray = np.zeros(d1//4, dtype = str) outputString = '' for", "fileName == None: fileName = binaryMatrixToHexString(parityMatrix, circulantSize) fileNameSHA224 = str(circulantSize)", "copied manually to a txt file, and it is the", "import circulant #import matplotlib.pyplot as plt import scipy.io import common", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '1': nibble = np.array([0,0,0,1],", "#from networkx.algorithms import bipartite #B = nx.Graph() #B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022,", "list(map(int, locationList)) newBlock = hotLocationsToCirculant(locationList, circulantSize) if i == 0:", "+ '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True, True) nearEarthParity =", "scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2() workspaceDict['snrData'] =", 
"nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False,", "None: import pathlib projectDir = pathlib.Path(__file__).parent.absolute() ## <NAME>: added on", "4] h = nibbleToHex(nibble) outputArray[j] = h outputString = outputString", "# This function assumes that each line in the file", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '9': nibble = np.array([1,0,0,1], dtype =", "right, i.e.: # line 0 defines circulant 0,0 with open(fileName)", "np.hstack((outputBinary, nibble)) return outputBinary def hexToCirculant(hexStr, circulantSize): binaryArray = hexStringToBinaryArray(hexStr)", "n = parityMatrix.shape M = m // circulantSize N =", "else: pass return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity) #nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt',", "see a reason to generalise this file, since matrices will", "import scipy.io import common import hashlib import os projectDir =", "sys sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE = np.int32 GENERAL_CODE_MATRIX_DATA_TYPE = np.int32 NIBBLE_CONVERTER", "def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow = True, isHex =", "bottom left to right, i.e.: # line 0 defines circulant", "nibbleToHex(nibble) outputArray[j] = h outputString = outputString + h return", "and the order in which they are defined is top", "accumulatedBlock = newBlock else: accumulatedBlock = np.vstack((accumulatedBlock, newBlock)) newMatrix =", "i, n in enumerate(X) ) #pos.update( (n, (2, i)) for", "= nibbleToHex(nibble) outputArray[j] = h outputString = outputString + h", "in which they are defined is top to bottom left", "m, n = parityMatrix.shape M = m // circulantSize N", "circulantSize) bRight = hexToCirculant(lines[2 * i + 1], circulantSize) newBlock", "np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray = binaryArray[1:] 
circulantMatrix =", "= scatterSNR workspaceDict['berData'] = scatterBER workspaceDict['itrData'] = scatterITR workspaceDict['averageSnrAxis'] =", "#print(m) #print(n) assert( m % circulantSize == 0) assert (n", "newBlock)) newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else: for", "hotLocationsToCirculant(locationList, circulantSize) if i == 0: accumulatedBlock2 = newBlock else:", "np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '9': nibble =", "0: accumulatedBlock2 = newBlock else: accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock)) newMatrix", "newBlock = hotLocationsToCirculant(locationList, circulantSize) if i == 0: accumulatedBlock2 =", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'C': nibble = np.array([1,1,0,0], dtype", "bipartite=1) # Add edges only between nodes of opposite node", "511, True, True, True) nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022,", "circulant. 
# Each line in the file then defines a", "== '0': nibble = np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "files in os.walk(path): for file in files: if str(file).endswith('.mat'): i", "for i in range((dim0 // circulantSize) ): bLeft = hexToCirculant(lines[2", "\"\"\" ## File handler ## This file was initially intended", "workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations workspaceDict['evaluationTime'] = evaluationTime workspaceDict['nonZero'] = numberOfNonZero scipy.io.savemat((fileNameWithPath", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] = 1 newCirculant = circulant(generatingVector) newCirculant", "= circulantMatrix.T return circulantMatrix def hotLocationsToCirculant(locationList, circulantSize): generatingVector = np.zeros(circulantSize,", "GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else: for i in range((dim1 // circulantSize)): locationList1", "averageNumberOfIterations = evaluationData.getStatsV2() workspaceDict['snrData'] = scatterSNR workspaceDict['berData'] = scatterBER workspaceDict['itrData']", "format. import numpy as np from scipy.linalg import circulant #import", "is the purpose of this file to parse it. ##", "== 'E': nibble = np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", ", k * circulantSize : (k + 1) * circulantSize]))", "if str(file).endswith('.mat'): i = i + 1 mat = scipy.io.loadmat(str(os.path.join(root,", "= str) outputString = '' for j in range(d1//4): nibble", "each line in the file contains the non zero locations", "= np.hstack((accumulatedUpBlock1, upBlock)) for i in range((dim1 // circulantSize)): locationList", "= np.hstack((outputBinary, nibble)) return outputBinary def hexToCirculant(hexStr, circulantSize): binaryArray =", "m,n = binaryMatrix.shape #print(m) #print(n) assert( m % circulantSize ==", "1022), bipartite=1) # Add edges only between nodes of opposite", "some matrix friendly format. 
import numpy as np from scipy.linalg", "= np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary = np.hstack((outputBinary, nibble)) return", "* circulantSize , k * circulantSize : (k + 1)", "evalTimes.append(mat['evaluationTime']) averageNumberOfIterations = mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis, fileName = None,", "= np.array(4 - (circulantSize % 4)) m,n = binaryMatrix.shape #print(m)", "root, dirs, files in os.walk(path): for file in files: if", "bipartite=0) #B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1) # Add edges only", "elif i == 'D': nibble = np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "= FILE_HANDLER_INT_DATA_TYPE) evalTimes = [] numberOfIterationsAtHigh = [] for root,", "above pdf were copied manually to a txt file, and", ") #pos.update( (n, (2, i)) for i, n in enumerate(Y)", "currently do not see a reason to generalise this file,", "berAxis, fileName = None, inputLabel = '', figureNumber = i,", "np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE) evalTimes = [] numberOfIterationsAtHigh = []", "newCirculant = newCirculant.T return newCirculant def readMatrixFromFile(fileName, dim0, dim1, circulantSize,", "(k + 1) * circulantSize])) hexArray, hexString = binaryArraytoHex(nextLine) hexName", "np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) for i in hexString: if i", "== '4': nibble = np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "= [] for root, dirs, files in os.walk(path): for file", "np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'A': nibble =", "in os.walk(path): for file in files: if str(file).endswith('.mat'): i =", "range(8176): # for j in range(1022): # if nearEarthParity[j,i] !=", "i in hexString: if i == '0': nibble = np.array([0,0,0,0],", "I currently do not see a reason to generalise this", "1) * circulantSize])) hexArray, hexString = binaryArraytoHex(nextLine) hexName = 
hexName", "is on correctness, I currently do not see a reason", "= GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error, 0-9 or A-F') pass nibble =", "fileNameSHA224 = str(circulantSize) + '_' + str(M) + '_' +", "assumes that each line in the file contains the non", "in the file contains the non zero locations of the", "fileNameWithPath = path + fileNameSHA224 else: fileNameWithPath = path +", "= 'D' elif n== 14: h = 'E' elif n==", "+ fileNameSHA224 else: fileNameWithPath = path + fileName print(\"*** \"", "i == '5': nibble = np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "txt file, and it is the purpose of this file", "binaryArray = binaryArray[1:] circulantMatrix = circulant(binaryArray) circulantMatrix = circulantMatrix.T return", "testFileHandler(): nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True,", "7156 + i)]) #X, Y = bipartite.sets(B) #pos = dict()", "= '' for j in range(d1//4): nibble = inputArray[4 *", "either json or some matrix friendly format. 
import numpy as", "def testFileHandler(): nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511,", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray = binaryArray[1:] circulantMatrix = circulant(binaryArray)", "None, evaluationTime = 0, numberOfNonZero = 0, fileName = None):", "nibble = np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'F':", "Finishing saveCodeInstance !\") return fileName def testFileHandler(): nearEarthGenerator = readMatrixFromFile(projectDir", "10: h = 'A' elif n== 11: h = 'B'", "...\") m, n = parityMatrix.shape M = m // circulantSize", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'F': nibble = np.array([1,1,1,1], dtype =", "False): i = 10 evaluationFaildAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE)", "outputBinary = np.hstack((outputBinary, nibble)) return outputBinary def hexToCirculant(hexStr, circulantSize): binaryArray", "None, path = None, evaluationTime = 0, numberOfNonZero = 0,", "j in range(d1//4): nibble = inputArray[4 * j : 4", "= bipartite.sets(B) #pos = dict() #pos.update( (n, (1, i)) for", "(2, i)) for i, n in enumerate(Y) ) #nx.draw(B, pos=pos)", "+ '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False) return 'OK'", "= False): i = 10 evaluationFaildAt = np.zeros(4, dtype =", "of the first row of a circulant. 
# Each line", "!= 0: # B.add_edges_from([(j, 7156 + i)]) #X, Y =", "= np.vstack((accumulatedBlock, newBlock)) newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock))", "assert (n % circulantSize == 0) M = m //", "nibble = np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'E':", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray): n = NIBBLE_CONVERTER.dot(inputArray) if n", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'E': nibble = np.array([1,1,1,0], dtype =", "correctness, I currently do not see a reason to generalise", "4 == 0) outputArray = np.zeros(d1//4, dtype = str) outputString", "snrAxis = mat['snrAxis'] snrActual = mat['averageSnrAxis'] if len(snrAxis) < 3:", "if ('evaluationTime' in mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations = mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual,", "contains the non zero locations of the first row of", "-*- coding: utf-8 -*- \"\"\" Created on Thu Nov 28", "bipartite.sets(B) #pos = dict() #pos.update( (n, (1, i)) for i,", "range(N): nextLine = np.hstack((leftPadding, binaryMatrix[ r * circulantSize , k", "to generate the matrices for the near earth code found", "= True, isGenerator = True ): # This function assumes", "for i in hexString: if i == '0': nibble =", "fileName print(\"*** \" + fileName) workspaceDict = {} workspaceDict['parityMatrix'] =", "False, False) return 'OK' def plotResults(path, makeMat = False): i", "nibble = np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '6':", "GENERAL_CODE_MATRIX_DATA_TYPE) generatingVector[locationList] = 1 newCirculant = circulant(generatingVector) newCirculant = newCirculant.T", "def plotResults(path, makeMat = False): i = 10 evaluationFaildAt =", "np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary = np.hstack((outputBinary, nibble)) return outputBinary", "= 
list(lines[(dim1 // circulantSize) + i].rstrip('\\n').split(',')) locationList = list(map(int, locationList))", "mat = scipy.io.loadmat(str(os.path.join(root, file))) snrAxis = mat['snrAxis'] snrActual = mat['averageSnrAxis']", "in range(1022): # if nearEarthParity[j,i] != 0: # B.add_edges_from([(j, 7156", "berData, averageNumberOfIterations = evaluationData.getStatsV2() workspaceDict['snrData'] = scatterSNR workspaceDict['berData'] = scatterBER", "= m // circulantSize N = n // circulantSize hexName", "len(inputArray) assert (d1 % 4 == 0) outputArray = np.zeros(d1//4,", "n in enumerate(X) ) #pos.update( (n, (2, i)) for i,", "'A': nibble = np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i ==", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '6': nibble = np.array([0,1,1,0], dtype", "left to right, i.e.: # line 0 defines circulant 0,0", "GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '3': nibble = np.array([0,0,1,1], dtype =", "= '' for r in range(M): for k in range(N):", "= 0, numberOfNonZero = 0, fileName = None): print(\"*** in", "nibble = np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '7':", "elif i == 'C': nibble = np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "n // circulantSize if fileName == None: fileName = binaryMatrixToHexString(parityMatrix,", "+ hexString return hexName def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData =", "path + fileName print(\"*** \" + fileName) workspaceDict = {}", "coding: utf-8 -*- \"\"\" Created on Thu Nov 28 12:10:11", "elif i == '8': nibble = np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "binaryMatrixToHexString(parityMatrix, circulantSize) fileNameSHA224 = str(circulantSize) + '_' + str(M) +", "scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2() workspaceDict['snrData']", "+ fileName print(\"*** \" + fileName) workspaceDict = {} workspaceDict['parityMatrix']", "+ 
fileName) workspaceDict = {} workspaceDict['parityMatrix'] = parityMatrix workspaceDict['fileName'] =", "// circulantSize) ): bLeft = hexToCirculant(lines[2 * i], circulantSize) bRight", "assert( m % circulantSize == 0) assert (n % circulantSize", "'/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False) return 'OK' def", "makeMat = False): i = 10 evaluationFaildAt = np.zeros(4, dtype", "'F' else: h = str(n) return h def binaryArraytoHex(inputArray): d1", "np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '3': nibble =", "N = n // circulantSize if fileName == None: fileName", "mat['snrAxis'] snrActual = mat['averageSnrAxis'] if len(snrAxis) < 3: evaluationFaildAt[len(snrAxis)] =", "{} workspaceDict['parityMatrix'] = parityMatrix workspaceDict['fileName'] = fileName if evaluationData !=", "hexString = binaryArraytoHex(nextLine) hexName = hexName + hexString return hexName", "h = 'B' elif n== 12: h = 'C' elif", "pass nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) outputBinary = np.hstack((outputBinary,", "projectDir) FILE_HANDLER_INT_DATA_TYPE = np.int32 GENERAL_CODE_MATRIX_DATA_TYPE = np.int32 NIBBLE_CONVERTER = np.array([8,", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'D': nibble = np.array([1,1,0,1], dtype", "np.hstack((accumulatedBlock2, newBlock)) newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return newMatrix def binaryMatrixToHexString(binaryMatrix,", "else: accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock)) newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return", "r * circulantSize , k * circulantSize : (k +", "file, since matrices will be saved in either json or", "elif n== 15: h = 'F' else: h = str(n)", "nibble = np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '2':", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'B': nibble = np.array([1,0,1,1],", "first row of a circulant. 
# Each line in the", "= readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True, True)", "nx.Graph() #B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1) # Add", "import numpy as np from scipy.linalg import circulant #import matplotlib.pyplot", "# line 0 defines circulant 0,0 with open(fileName) as fid:", "= hexToCirculant(lines[2 * i + 1], circulantSize) newBlock = np.hstack((bLeft,", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '7': nibble = np.array([0,1,1,1], dtype", "str(n) return h def binaryArraytoHex(inputArray): d1 = len(inputArray) assert (d1", "binaryMatrix[ r * circulantSize , k * circulantSize : (k", "i == 0: accumulatedBlock2 = newBlock else: accumulatedBlock2 = np.hstack((accumulatedBlock2,", "h = str(n) return h def binaryArraytoHex(inputArray): d1 = len(inputArray)", "def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None, path = None,", "range(1022): # if nearEarthParity[j,i] != 0: # B.add_edges_from([(j, 7156 +", "str(file).endswith('.mat'): i = i + 1 mat = scipy.io.loadmat(str(os.path.join(root, file)))", "else: #print('Error, 0-9 or A-F') pass nibble = np.array([], dtype", "= dict() #pos.update( (n, (1, i)) for i, n in", "circulantSize)): locationList = list(lines[(dim1 // circulantSize) + i].rstrip('\\n').split(',')) locationList =", "edges only between nodes of opposite node sets #for i", "numpy as np from scipy.linalg import circulant #import matplotlib.pyplot as", "Thu Nov 28 12:10:11 2019 @author: Omer \"\"\" ## File", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '2': nibble = np.array([0,0,1,0], dtype", "h def binaryArraytoHex(inputArray): d1 = len(inputArray) assert (d1 % 4", "= hexStringToBinaryArray(hexStr) if len(binaryArray) < circulantSize: binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype", "- (circulantSize % 4)) m,n = binaryMatrix.shape #print(m) #print(n) assert(", "True, False, False) 
return 'OK' def plotResults(path, makeMat = False):", "= mat['berData'] if ('evaluationTime' in mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations = mat['averageNumberOfIterations']", "np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'C': nibble =", "circulantSize, codewordSize, evaluationData = None, path = None, evaluationTime =", "scipy.io import common import hashlib import os projectDir = os.environ.get('LDPC')", "newMatrix def binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding = np.array(4 - (circulantSize %", "numberOfNonZero scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"*** Finishing saveCodeInstance", "evaluationFaildAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE) evalTimes = [] numberOfIterationsAtHigh", "return hexName def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None, path", "evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity) #nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True,", "#print(n) assert( m % circulantSize == 0) assert (n %", "hexName + hexString return hexName def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData", "circulantSize) + i].rstrip('\\n').split(',')) locationList = list(map(int, locationList)) newBlock = hotLocationsToCirculant(locationList,", "* j : 4 * j + 4] h =", "= 'C' elif n== 13: h = 'D' elif n==", "= np.hstack((accumulatedBlock2, newBlock)) newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return newMatrix def", "workspaceDict['berData'] = scatterBER workspaceDict['itrData'] = scatterITR workspaceDict['averageSnrAxis'] = averageSnrAxis workspaceDict['averageNumberOfIterations']", "handler ## This file was initially intended purely to generate", "'A' elif n== 11: h = 'B' elif n== 12:", 
"scipy.io.loadmat(str(os.path.join(root, file))) snrAxis = mat['snrAxis'] snrActual = mat['averageSnrAxis'] if len(snrAxis)", "* i + 1], circulantSize) newBlock = np.hstack((bLeft, bRight)) if", "= hexName + hexString return hexName def saveCodeInstance(parityMatrix, circulantSize, codewordSize,", "values from the above pdf were copied manually to a", "scatterSNR workspaceDict['berData'] = scatterBER workspaceDict['itrData'] = scatterITR workspaceDict['averageSnrAxis'] = averageSnrAxis", "generate the matrices for the near earth code found in:", "bLeft = hexToCirculant(lines[2 * i], circulantSize) bRight = hexToCirculant(lines[2 *", "scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2() workspaceDict['snrData'] = scatterSNR", "accumulatedBlock2)) return newMatrix def binaryMatrixToHexString(binaryMatrix, circulantSize): leftPadding = np.array(4 -", "+ '_' + str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath =", "a reason to generalise this file, since matrices will be", "here is on correctness, I currently do not see a", "= np.array([8, 4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) def nibbleToHex(inputArray):", "!\") return fileName def testFileHandler(): nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt',", "workspaceDict['parityMatrix'] = parityMatrix workspaceDict['fileName'] = fileName if evaluationData != None:", "circulantSize): leftPadding = np.array(4 - (circulantSize % 4)) m,n =", "hexString return hexName def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None,", "leftPadding = np.array(4 - (circulantSize % 4)) m,n = binaryMatrix.shape", "= True ): # This function assumes that each line", "hexToCirculant(hexStr, circulantSize): binaryArray = hexStringToBinaryArray(hexStr) if len(binaryArray) < circulantSize: binaryArray", "elif i == '6': nibble = np.array([0,1,1,0], dtype = 
GENERAL_CODE_MATRIX_DATA_TYPE)", "= mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel =", "1022, 8176, 511, True, False, False) #import networkx as nx", "it is the purpose of this file to parse it.", "nibble)) return outputBinary def hexToCirculant(hexStr, circulantSize): binaryArray = hexStringToBinaryArray(hexStr) if", "workspaceDict = {} workspaceDict['parityMatrix'] = parityMatrix workspaceDict['fileName'] = fileName if", "= 'B' elif n== 12: h = 'C' elif n==", "range((dim0 // circulantSize) ): bLeft = hexToCirculant(lines[2 * i], circulantSize)", "else: for i in range((dim1 // circulantSize)): locationList1 = list(lines[", "hexArray, hexString = binaryArraytoHex(nextLine) hexName = hexName + hexString return", "circulantSize N = n // circulantSize hexName = '' for", "newBlock = np.hstack((bLeft, bRight)) if i == 0: accumulatedBlock =", "row of a circulant. # Each line in the file", "= binaryMatrix.shape #print(m) #print(n) assert( m % circulantSize == 0)", "isGenerator: for i in range((dim0 // circulantSize) ): bLeft =", "1022, 8176, 511, True, False, False) return 'OK' def plotResults(path,", "= hexToCirculant(lines[2 * i], circulantSize) bRight = hexToCirculant(lines[2 * i", "15: h = 'F' else: h = str(n) return h", "on correctness, I currently do not see a reason to", "* j + 4] h = nibbleToHex(nibble) outputArray[j] = h", "for i in range((dim1 // circulantSize)): locationList = list(lines[(dim1 //", "m // circulantSize N = n // circulantSize if fileName", "i == '2': nibble = np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "circulantSize)): locationList1 = list(lines[ i].rstrip('\\n').split(',')) locationList1 = list(map(int, locationList1)) upBlock", "1 berAxis = mat['berData'] if ('evaluationTime' in mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations", "in files: if str(file).endswith('.mat'): i = i + 
1 mat", "it. ## The emphasis here is on correctness, I currently", "#import matplotlib.pyplot as plt import scipy.io import common import hashlib", "M = m // circulantSize N = n // circulantSize", "#X, Y = bipartite.sets(B) #pos = dict() #pos.update( (n, (1,", "scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict) #evaluationData.plotStats(codewordSize, fileNameWithPath) print(\"*** Finishing saveCodeInstance !\")", "= fid.readlines() if isGenerator: for i in range((dim0 // circulantSize)", "newBlock else: accumulatedBlock = np.vstack((accumulatedBlock, newBlock)) newMatrix = np.hstack((np.eye(dim0, dtype", "== 10: h = 'A' elif n== 11: h =", "== 0) outputArray = np.zeros(d1//4, dtype = str) outputString =", "FILE_HANDLER_INT_DATA_TYPE = np.int32 GENERAL_CODE_MATRIX_DATA_TYPE = np.int32 NIBBLE_CONVERTER = np.array([8, 4,", "511, True, False, False) return 'OK' def plotResults(path, makeMat =", "d1 = len(inputArray) assert (d1 % 4 == 0) outputArray", "nextLine = np.hstack((leftPadding, binaryMatrix[ r * circulantSize , k *", "fileName = None, inputLabel = '', figureNumber = i, figureName", "as nx #from networkx.algorithms import bipartite #B = nx.Graph() #B.add_nodes_from(range(1022),", "i)]) #X, Y = bipartite.sets(B) #pos = dict() #pos.update( (n,", "= newCirculant.T return newCirculant def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow", "sure this doesn't break anything. import sys sys.path.insert(1, projectDir) FILE_HANDLER_INT_DATA_TYPE", "= 'E' elif n== 15: h = 'F' else: h", "1 newCirculant = circulant(generatingVector) newCirculant = newCirculant.T return newCirculant def", "purpose of this file to parse it. 
## The emphasis", "np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '1': nibble =", "= np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else: for i in", "newBlock)) newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2)) return newMatrix def binaryMatrixToHexString(binaryMatrix, circulantSize):", "## This file was initially intended purely to generate the", "The emphasis here is on correctness, I currently do not", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'C': nibble = np.array([1,1,0,0],", "the first row of a circulant. # Each line in", "locationList)) newBlock = hotLocationsToCirculant(locationList, circulantSize) if i == 0: accumulatedBlock2", "= np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '5': nibble", "m % circulantSize == 0) assert (n % circulantSize ==", "circulant, and the order in which they are defined is", "= nx.Graph() #B.add_nodes_from(range(1022), bipartite=0) #B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1) #", "= m // circulantSize N = n // circulantSize if", "in range(d1//4): nibble = inputArray[4 * j : 4 *", "return 'OK' def plotResults(path, makeMat = False): i = 10", "= np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '3': nibble", "mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel = '',", "@author: Omer \"\"\" ## File handler ## This file was", "emphasis here is on correctness, I currently do not see", "they are defined is top to bottom left to right,", "np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'D': nibble =", "for i in range((dim1 // circulantSize)): locationList1 = list(lines[ i].rstrip('\\n').split(','))", "dtype = GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error, 0-9 or A-F') pass nibble", "= {} workspaceDict['parityMatrix'] = parityMatrix workspaceDict['fileName'] = fileName 
if evaluationData", "reason to generalise this file, since matrices will be saved", "common import hashlib import os projectDir = os.environ.get('LDPC') if projectDir", "len(snrAxis) < 3: evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1 berAxis =", "list(lines[ i].rstrip('\\n').split(',')) locationList1 = list(map(int, locationList1)) upBlock = hotLocationsToCirculant(locationList1, circulantSize)", "28 12:10:11 2019 @author: Omer \"\"\" ## File handler ##", "n== 14: h = 'E' elif n== 15: h =", "= inputArray[4 * j : 4 * j + 4]", "== 'D': nibble = np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock)) else: for i", "np.int32 GENERAL_CODE_MATRIX_DATA_TYPE = np.int32 NIBBLE_CONVERTER = np.array([8, 4, 2, 1],", "numberOfIterationsAtHigh = [] for root, dirs, files in os.walk(path): for", "): bLeft = hexToCirculant(lines[2 * i], circulantSize) bRight = hexToCirculant(lines[2", "NIBBLE_CONVERTER.dot(inputArray) if n == 10: h = 'A' elif n==", "== '1': nibble = np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i", "= np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif i == '1': nibble", "+ str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest()) fileNameWithPath = path + fileNameSHA224 else: fileNameWithPath =", "in mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations = mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis, fileName", "projectDir = pathlib.Path(__file__).parent.absolute() ## <NAME>: added on 01/12/2020, need to", "= True, isHex = True, isGenerator = True ): #", "% 4)) m,n = binaryMatrix.shape #print(m) #print(n) assert( m %", "evalTimes = [] numberOfIterationsAtHigh = [] for root, dirs, files", "saveCodeInstance !\") return fileName def testFileHandler(): nearEarthGenerator = 
readMatrixFromFile(projectDir +", "dirs, files in os.walk(path): for file in files: if str(file).endswith('.mat'):", "locationList1 = list(map(int, locationList1)) upBlock = hotLocationsToCirculant(locationList1, circulantSize) if i", "< 3: evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1 berAxis = mat['berData']", "i == '7': nibble = np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "+ 1022), bipartite=1) # Add edges only between nodes of", "averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2() workspaceDict['snrData'] = scatterSNR workspaceDict['berData'] =", "file was initially intended purely to generate the matrices for", "h = 'A' elif n== 11: h = 'B' elif", "def nibbleToHex(inputArray): n = NIBBLE_CONVERTER.dot(inputArray) if n == 10: h", "mat.keys()): evalTimes.append(mat['evaluationTime']) averageNumberOfIterations = mat['averageNumberOfIterations'] numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis, fileName =", "+ 1 berAxis = mat['berData'] if ('evaluationTime' in mat.keys()): evalTimes.append(mat['evaluationTime'])", "circulantSize, isRow = True, isHex = True, isGenerator = True", "= GENERAL_CODE_MATRIX_DATA_TYPE) elif i == 'E': nibble = np.array([1,1,1,0], dtype", "print(\"*** \" + fileName) workspaceDict = {} workspaceDict['parityMatrix'] = parityMatrix", "None, inputLabel = '', figureNumber = i, figureName = str(file))", "'B' elif n== 12: h = 'C' elif n== 13:", "accumulatedUpBlock1 = upBlock else: accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock)) for i", "i == '0': nibble = np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) elif", "elif i == '7': nibble = np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "i, figureName = str(file)) else: pass return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh", "json or some matrix friendly format. 
import numpy as np", "import os projectDir = os.environ.get('LDPC') if projectDir == None: import", "str(file)) else: pass return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh #plt.imshow(nearEarthParity) #nearEarthParity =", "on 01/12/2020, need to make sure this doesn't break anything.", "dim1, circulantSize, isRow = True, isHex = True, isGenerator =", "np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error, 0-9 or A-F') pass", "+ i)]) #X, Y = bipartite.sets(B) #pos = dict() #pos.update(", "if evaluationData != None: scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData,", "fileName def testFileHandler(): nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176,", "// circulantSize)): locationList = list(lines[(dim1 // circulantSize) + i].rstrip('\\n').split(',')) locationList", "in either json or some matrix friendly format. import numpy", "in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The values from the above pdf were", "0) M = m // circulantSize N = n //", "# B.add_edges_from([(j, 7156 + i)]) #X, Y = bipartite.sets(B) #pos", "for the near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf ## The", "locationList = list(map(int, locationList)) newBlock = hotLocationsToCirculant(locationList, circulantSize) if i", "of a circulant. 
# Each line in the file then", "utf-8 -*- \"\"\" Created on Thu Nov 28 12:10:11 2019", "newCirculant = circulant(generatingVector) newCirculant = newCirculant.T return newCirculant def readMatrixFromFile(fileName,", "for j in range(d1//4): nibble = inputArray[4 * j :", "n // circulantSize hexName = '' for r in range(M):", "0: accumulatedUpBlock1 = upBlock else: accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock)) for", "figureNumber = i, figureName = str(file)) else: pass return evalTimes,", "numberOfIterationsAtHigh.append(averageNumberOfIterations[-1]) common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel = '', figureNumber", "= np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) else: #print('Error, 0-9 or A-F')", "outputArray, outputString def hexStringToBinaryArray(hexString): outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)", "the order in which they are defined is top to", "h = 'D' elif n== 14: h = 'E' elif", "parityMatrix.shape M = m // circulantSize N = n //", "for i, n in enumerate(X) ) #pos.update( (n, (2, i))", "binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE)) else: binaryArray = binaryArray[1:]", "nx #from networkx.algorithms import bipartite #B = nx.Graph() #B.add_nodes_from(range(1022), bipartite=0)", "parse it. ## The emphasis here is on correctness, I", "this file, since matrices will be saved in either json", "= np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE) for i in hexString: if" ]
[ "'agent': '\"Mozilla/4.08 [en] (Win98; I ;Nav)\"', 'auth': 'frank', 'ident': '-',", "'ndc': 'unknown', 'message': 'This is sample log message'}, 'COMMON_LOG_FORMAT': {'request':", "{'message': 'client denied by server configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp': 'Wed Oct", "'ident': '-', 'response': '500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET',", "test_configuration_required_fields(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_retry_count(sdc_builder, sdc_executor): pass", "= sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin = builder.add_stage('Amazon S3', type='origin') s3_origin.set_attributes(**attributes) trash", "sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING': sdc_executor.stop_pipeline(pipeline) # cleaning up s3 bucket delete_aws_objects(client,", "log file and parse the same. Pipeline for the same-", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_retry_count(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet", "'09/Jan/2015:9:12:06 +0000'}, 'LEEF': {'eventId': '4000030', 'product': 'Deep Security Agent', 'extensions':", "'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06", "file_content = data_format_content[log_format] client = aws.s3 s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes", "{'bucket': aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*', 'number_of_threads': 1, 'read_order': 'LEXICOGRAPHICAL', 'data_format': data_format,", "'src': '10.217.253.78', 'spt': '53743'}, 'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion': 0,", "'denied by server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700]", "implemented') def test_configuration_proxy_host(sdc_builder, sdc_executor, 
use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet", "test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2',", "4}, {'fieldPath': '/file', 'group': 5}, {'fieldPath': '/message', 'group': 6}] REGULAR_EXPRESSION", "'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'}, 'signature': 'APPFW', 'vendor': 'Citrix',", "yet implemented') def test_configuration_connection_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet", "def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented')", "'CEF', 'LEEF']) def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format): \"\"\"Check whether", "sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_retry_count(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not", "use_proxy): pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1',", "'prefix_pattern': f'{s3_key}/*', 'number_of_threads': 1, 'read_order': 'LEXICOGRAPHICAL', 'data_format': data_format, 'log_format': log_format,", "get_data_to_verify_output[log_format] finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING': sdc_executor.stop_pipeline(pipeline) # cleaning up", "s3_origin_pipeline.configuration['shouldRetry'] = False return s3_origin_pipeline def delete_aws_objects(client, aws, s3_key): #", "= False return s3_origin_pipeline def delete_aws_objects(client, aws, s3_key): # Clean", "'message': 'This is sample log message'}, 'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth':", 
"test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy): pass @aws('s3') @pytest.mark.parametrize('data_format', ['LOG']) @pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT',", "method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.', 'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security", "[09/Jan/2015:9:12:06 +0000] \"GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\" 500 17 ',", "log format or not. A log file is being created", "@pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_content(sdc_builder, sdc_executor, task): pass", "'LEEF']) def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format): \"\"\"Check whether S3", "format or not. A log file is being created in", "is sample log message', 'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000]", "'<DSA version>'}, 'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM', '/info':", "test_configuration_socket_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_tags(sdc_builder,", "'500'}, 'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal", "bucket delete_aws_objects(client, aws, s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws): # Build", "'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server configuration:/export/home/live/ap/htdocs/'", "frank [10/Oct/2000:13:55:36 -0700] ' '\"GET /apache.gif HTTP/1.0\" 200 232', 'LOG4J':", "'/apache.gif', 'agent': '\"Mozilla/4.08 [en] (Win98; I ;Nav)\"', 'auth': 'frank', 'ident':", "'/apache.gif', 'auth': 'frank', 'ident': '-', 'response': '200', 'bytes': '232', 'clientip':", "builder.add_stage('Trash') 
pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"]) s3_origin", "'127.0.0.1', 'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser':", "== 'no-more-data'}\"]) s3_origin >> trash s3_origin >= pipeline_finisher_executor s3_origin_pipeline =", "'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'}, 'signature': 'APPFW', 'vendor':", "yet implemented') def test_configuration_endpoint(sdc_builder, sdc_executor, region): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not", "pipeline_finisher_executor s3_origin_pipeline = builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] = False return s3_origin_pipeline def", "'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT': {'message': 'client", "yet implemented') def test_configuration_secret_access_key(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def", "CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.', 'LEEF': 'LEEF:", "def test_configuration_socket_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet implemented') def", "'Wed Oct 11 14:32:52 2000', 'loglevel': 'error', 'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT':", "'vendor': 'Trend Micro', 'version': '<DSA version>'}, 'REGEX': {'/time': '08:23:53', '/date':", "aws, s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws): # Build pipeline. 
builder", "= aws.s3 s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes = {'bucket': aws.s3_bucket_name, 'prefix_pattern':", "S3', type='origin') s3_origin.set_attributes(**attributes) trash = builder.add_stage('Trash') pipeline_finisher_executor = builder.add_stage('Pipeline Finisher", "yet implemented') def test_configuration_required_fields(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def", "'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not yet", "LOG_FIELD_MAPPING } pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws) s3_origin = pipeline.origin_stage", "Oct 11 14:32:52 2000', 'loglevel': 'error', 'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request':", "'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident': '-', 'response': '200', 'bytes':", "test_configuration_access_key_id(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_bucket(sdc_builder, sdc_executor): pass", "s3_origin_pipeline def delete_aws_objects(client, aws, s3_key): # Clean up S3. 
delete_keys", "yet implemented') def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error): pass @pytest.mark.skip('Not yet implemented')", "'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1',", "{'severity': '6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.', 'request':", "# cleaning up s3 bucket delete_aws_objects(client, aws, s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder,", "= execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline) assert output_records[0].field == get_data_to_verify_output[log_format] finally: if", "attributes, aws) s3_origin = pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content) output_records", "'-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'}, 'CEF': {'severity': '6',", "<gh_stars>0 import logging import pytest from streamsets.testframework.markers import aws, sdc_min_version", "yet implemented') def test_configuration_tags(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT',", "sdc_executor.add_pipeline(pipeline) snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot output_records = snapshot[s3_origin].output return output_records", "AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'} # data to verify the", "the same. 
Pipeline for the same- s3_origin >> trash s3_origin", "delete_aws_objects(client, aws, s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws): # Build pipeline.", "['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not yet implemented') def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error):", "Security Agent', 'extensions': {'cat': 'Realtime'}, 'leefVersion': 2.0, 'vendor': 'Trend Micro',", "Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client ' 'denied", "+0000] \"GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\" 500 17 ', 'CEF':", "2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241", "@pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy): pass", "[client 127.0.0.1] client ' 'denied by server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1", "'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF']) def test_configurations_data_format_log(sdc_executor, sdc_builder, aws,", "@pytest.mark.skip('Not yet implemented') def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error): pass @pytest.mark.skip('Not yet", "'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp': 'Wed", "'Trend Micro', 'version': '<DSA version>'}, 'REGEX': {'/time': '08:23:53', '/date': '2019-04-30',", "True]) @pytest.mark.skip('Not yet implemented') def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object): pass", "implemented') def test_configuration_tags(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT'])", "yet implemented') def 
test_configuration_new_object_path(sdc_builder, sdc_executor, task): pass @pytest.mark.skip('Not yet implemented')", "'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j', 'ndc': 'unknown', 'message': 'This", "return s3_origin_pipeline def delete_aws_objects(client, aws, s3_key): # Clean up S3.", "unknown - This is sample log message', 'APACHE_ERROR_LOG_FORMAT': '[Wed Oct", "'%h %l %u [%t] \"%r\" %>s %b', 'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping':", "'main', 'category': 'org.StreamSets.Log4j', 'ndc': 'unknown', 'message': 'This is sample log", "@pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not yet implemented') def test_configuration_endpoint(sdc_builder, sdc_executor, region): pass", "test_configuration_preconditions(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_host(sdc_builder,", "Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline): sdc_executor.add_pipeline(pipeline) snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot", "region): pass @pytest.mark.skip('Not yet implemented') def test_configuration_required_fields(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not", "# Clean up S3. delete_keys = {'Objects': [{'Key': k['Key']} for", "\"GET /apache.gif' ' HTTP/1.0\" 200 2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08' ' [en]", "[en] (Win98; I ;Nav)\"', 'auth': 'frank', 'ident': '-', 'verb': 'GET',", "same. 
Pipeline for the same- s3_origin >> trash s3_origin >=", "up s3 bucket delete_aws_objects(client, aws, s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws):", "'-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName':", "'[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client '", "test_configuration_retry_count(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_secret_access_key(sdc_builder, sdc_executor): pass", "on_record_error): pass @pytest.mark.skip('Not yet implemented') def test_configuration_preconditions(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('use_proxy',", "'test', 'timestamp': 'Wed Oct 11 14:32:52 2000', 'loglevel': 'error', 'clientip':", "def delete_aws_objects(client, aws, s3_key): # Clean up S3. delete_keys =", "sdc_executor, task): pass @pytest.mark.parametrize('use_proxy', [False, True]) @pytest.mark.skip('Not yet implemented') def", "= data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content = data_format_content[log_format] client = aws.s3 s3_key", "verify the output of amazon s3 origin. 
get_data_to_verify_output = {", "{'fieldPath': '/timehalf', 'group': 3}, {'fieldPath': '/info', 'group': 4}, {'fieldPath': '/file',", "region): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_new_object_path(sdc_builder, sdc_executor,", "'APPFW_STARTURL', 'version': 'NS10.0'}, 'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-', 'ident': '-',", "yet implemented') def test_configuration_access_key_id(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def", "11 14:32:52 2000', 'loglevel': 'error', 'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request': '/apache.gif',", "[error] [client 127.0.0.1] client ' 'denied by server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT':", "written int the file on s3 data_format_content = { 'COMMON_LOG_FORMAT':", "origin can parse different log format or not. A log", "'DEBUG', 'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j', 'ndc': 'unknown', 'message':", "2000] [error] [client 127.0.0.1] client ' 'denied by server configuration:/export/home/live/ap/htdocs/test',", "'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.',", "(Win98; I ;Nav)\"', 'auth': 'frank', 'ident': '-', 'verb': 'GET', 'referrer':", "@pytest.mark.skip('Not yet implemented') def test_configuration_preconditions(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not", "r'(\\S+) (\\S+) (\\S+) (\\S+) (\\S+) (.*)' # log to be", "'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF']) def test_configurations_data_format_log(sdc_executor,", "'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF']) def test_configurations_data_format_log(sdc_executor, sdc_builder,", "= 
builder.add_stage('Trash') pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"])", "'Disallow Illegal URL.', 'request': 'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt':", "@pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_delete_original_object(sdc_builder,", "'loglevel': 'error', 'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '\"Mozilla/4.08 [en]", "implemented') def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy): pass @aws('s3') @pytest.mark.parametrize('data_format', ['LOG']) @pytest.mark.parametrize('log_format',", "'127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '\"Mozilla/4.08 [en] (Win98; I ;Nav)\"',", "builder.add_stage('Pipeline Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"]) s3_origin >> trash s3_origin", "(\\S+) (\\S+) (.*)' # log to be written int the", "aws) s3_origin = pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content) output_records =", "by server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET", "'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'}, 'LEEF': {'eventId': '4000030', 'product': 'Deep", "'10.185.248.71', 'bytesSent': '17', 'status': '500'}, 'CEF': {'severity': '6', 'product': 'NetScaler',", "@pytest.mark.skip('Not yet implemented') def test_configuration_new_object_path(sdc_builder, sdc_executor, task): pass @pytest.mark.skip('Not yet", "2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08' ' 
[en] (Win98; I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71", "DEBUG org.StreamSets.Log4j unknown - This is sample log message', 'APACHE_ERROR_LOG_FORMAT':", "'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL', 'version': 'NS10.0'}, 'GROK': {'request':", "@pytest.mark.parametrize('data_format', ['LOG']) @pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J',", "'group': 1}, {'fieldPath': '/time', 'group': 2}, {'fieldPath': '/timehalf', 'group': 3},", "s3_origin = pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content) output_records = execute_pipeline_and_get_output(sdc_executor,", "client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content) output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline) assert output_records[0].field", "'clientip': '127.0.0.1', 'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT':", "{'eventId': '4000030', 'product': 'Deep Security Agent', 'extensions': {'cat': 'Realtime'}, 'leefVersion':", "'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j', 'ndc':", "REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING } pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws) s3_origin", "is sample log message'}, 'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident':", "from streamsets.testframework.utils import get_random_string logger = logging.getLogger(__name__) S3_SANDBOX_PREFIX = 'sandbox'", "implemented') def test_configuration_new_object_path(sdc_builder, sdc_executor, task): pass @pytest.mark.skip('Not yet implemented') def", "pass @aws('s3') @pytest.mark.parametrize('data_format', ['LOG']) 
@pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX',", "2}, {'fieldPath': '/timehalf', 'group': 3}, {'fieldPath': '/info', 'group': 4}, {'fieldPath':", "client = aws.s3 s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes = {'bucket': aws.s3_bucket_name,", "yet implemented') def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object): pass @pytest.mark.parametrize('region', ['OTHER'])", "pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"]) s3_origin >>", "HTTP/1.1', 'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'}, 'CEF':", "' HTTP/1.0\" 200 2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08' ' [en] (Win98; I", "/apache.gif HTTP/1.0\" 200 232', 'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown", "'GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17',", "'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET', 'httpversion': '1.1', 'rawrequest': None,", "implemented') def test_configuration_required_fields(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_retry_count(sdc_builder,", "'name': 'APPFW_STARTURL', 'version': 'NS10.0'}, 'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-', 'ident':", "be written int the file on s3 data_format_content = {", "def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy): pass @aws('s3') @pytest.mark.parametrize('data_format', ['LOG']) @pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT',", "import pytest from streamsets.testframework.markers import aws, sdc_min_version from 
streamsets.testframework.utils import", "['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF']) def", "delete_keys = {'Objects': [{'Key': k['Key']} for k in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]}", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_preconditions(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('use_proxy', [True])", "s3_origin = builder.add_stage('Amazon S3', type='origin') s3_origin.set_attributes(**attributes) trash = builder.add_stage('Trash') pipeline_finisher_executor", "type='origin') s3_origin.set_attributes(**attributes) trash = builder.add_stage('Trash') pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor')", "' [en] (Win98; I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06", "on s3 data_format_content = { 'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36", "'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not yet implemented') def test_configuration_region(sdc_builder, sdc_executor,", "'group': 4}, {'fieldPath': '/file', 'group': 5}, {'fieldPath': '/message', 'group': 6}]", "'/date': '2019-04-30', '/timehalf': 'AM', '/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file':", "'2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'} # data to", ";Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] \"GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300", "232', 'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown - This is", "data_format_content[log_format] client = aws.s3 s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes = {'bucket':", "['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_content(sdc_builder, sdc_executor, task): pass 
@pytest.mark.parametrize('task',", "version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime', 'REGEX': '2019-04-30 08:23:53", "aws.s3 s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes = {'bucket': aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*',", "'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF']) def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format,", "s3_origin >= pipeline_finisher_executor \"\"\" if log_format == 'GROK': file_content =", "'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'},", "execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline) assert output_records[0].field == get_data_to_verify_output[log_format] finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status')", "1, 'read_order': 'LEXICOGRAPHICAL', 'data_format': data_format, 'log_format': log_format, 'custom_log_format': '%h %l", "= builder.add_stage('Amazon S3', type='origin') s3_origin.set_attributes(**attributes) trash = builder.add_stage('Trash') pipeline_finisher_executor =", "= {'Objects': [{'Key': k['Key']} for k in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name,", "logging import pytest from streamsets.testframework.markers import aws, sdc_min_version from streamsets.testframework.utils", "{ 'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j',", "'group': 5}, {'fieldPath': '/message', 'group': 6}] REGULAR_EXPRESSION = r'(\\S+) (\\S+)", "def test_configuration_object(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not yet", "'GROK': file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content = 
data_format_content[log_format] client =", "'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2'])", "else: file_content = data_format_content[log_format] client = aws.s3 s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}'", "'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3',", "'1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied", "[False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object):", "-0700] \"GET /apache.gif' ' HTTP/1.0\" 200 2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08' '", "'LEEF': {'eventId': '4000030', 'product': 'Deep Security Agent', 'extensions': {'cat': 'Realtime'},", "'/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet implemented') def test_configuration_access_key_id(sdc_builder,", "'2019-04-30', '/timehalf': 'AM', '/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}}", "pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws) s3_origin = pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name,", "implemented') def test_configuration_content(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False,", "'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1',", "= { 'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] ' '\"GET", "file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content = data_format_content[log_format] client = aws.s3", "of 
amazon s3 origin. get_data_to_verify_output = { 'LOG4J': {'severity': 'DEBUG',", "@pytest.mark.skip('Not yet implemented') def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True])", "\"GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\" 500 17 ', 'CEF': '10.217.31.247", "implemented') def test_configuration_bucket(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_connection_timeout(sdc_builder,", "'verb': 'GET', 'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'}, 'LEEF':", "def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error): pass @pytest.mark.skip('Not yet implemented') def test_configuration_preconditions(sdc_builder,", "'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not yet implemented') def test_configuration_region(sdc_builder,", "whether S3 origin can parse different log format or not.", "= data_format_content[log_format] client = aws.s3 s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes =", "Clean up S3. 
delete_keys = {'Objects': [{'Key': k['Key']} for k", "'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache.gif' ' HTTP/1.0\"", "def test_configuration_bucket(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_connection_timeout(sdc_builder, sdc_executor):", "def test_configuration_retry_count(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_secret_access_key(sdc_builder, sdc_executor):", "14:32:52 2000] [error] [client 127.0.0.1] client ' 'denied by server", "URL.', 'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT", "'\"Mozilla/4.08 [en] (Win98; I ;Nav)\"', 'auth': 'frank', 'ident': '-', 'verb':", "'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2',", "yet implemented') def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not", "A log file is being created in s3 bucket mentioned", "'LEXICOGRAPHICAL', 'data_format': data_format, 'log_format': log_format, 'custom_log_format': '%h %l %u [%t]", "trash = builder.add_stage('Trash') pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() ==", "False return s3_origin_pipeline def delete_aws_objects(client, aws, s3_key): # Clean up", "- - [09/Jan/2015:9:12:06 +0000] \"GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\" 500", "sev=6 cn1=241 msg=Realtime', 'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline", "{'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1',", "builder = 
sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin = builder.add_stage('Amazon S3', type='origin') s3_origin.set_attributes(**attributes)", "None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server", "sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_user(sdc_builder,", "being created in s3 bucket mentioned below .S3 origin reads", "Build pipeline. builder = sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin = builder.add_stage('Amazon S3',", "= 'sandbox' LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1}, {'fieldPath': '/time',", "@pytest.mark.skip('Not yet implemented') def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True])", "is being created in s3 bucket mentioned below .S3 origin", "@pytest.mark.skip('Not yet implemented') def test_configuration_region(sdc_builder, sdc_executor, region): pass @pytest.mark.skip('Not yet", "bucket mentioned below .S3 origin reads the log file and", "def test_configuration_endpoint(sdc_builder, sdc_executor, region): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not yet implemented')", "sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_secret_access_key(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not", "200 232', 'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown - This", "} pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws) s3_origin = pipeline.origin_stage try:", "['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_delete_original_object(sdc_builder, sdc_executor,", "'bytes': '2326', 'clientip': '127.0.0.1', 'httpversion': '1.0', 'rawrequest': None, 
'timestamp': '10/Oct/2000:13:55:36", "s3 bucket delete_aws_objects(client, aws, s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws): #", "@pytest.mark.parametrize('use_proxy', [False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy):", "in s3 bucket mentioned below .S3 origin reads the log", "client ' 'denied by server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1 - frank", "'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\" 500 17 ', 'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 '", "'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-',", "s3_origin, pipeline) assert output_records[0].field == get_data_to_verify_output[log_format] finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') ==", "streamsets.testframework.markers import aws, sdc_min_version from streamsets.testframework.utils import get_random_string logger =", "'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'}, 'CEF': {'severity': '6', 'product':", "message', 'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1]", "log_format): \"\"\"Check whether S3 origin can parse different log format", "' '\"GET /apache.gif HTTP/1.0\" 200 232', 'LOG4J': '200 [main] DEBUG", "server configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp': 'Wed Oct 11 14:32:52 2000', 'loglevel':", "0, 'name': 'APPFW_STARTURL', 'version': 'NS10.0'}, 'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-',", "'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL', 'version': 'NS10.0'},", "try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', 
Body=file_content) output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline) assert", "[True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy',", "sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_password(sdc_builder,", "import get_random_string logger = logging.getLogger(__name__) S3_SANDBOX_PREFIX = 'sandbox' LOG_FIELD_MAPPING =", "test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error): pass @pytest.mark.skip('Not yet implemented') def test_configuration_preconditions(sdc_builder, sdc_executor):", "'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'}, 'LEEF': {'eventId': '4000030',", "@pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF'])", "== 'GROK': file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content = data_format_content[log_format] client", "'response': '500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET', 'httpversion': '1.1',", "attributes, aws): # Build pipeline. 
builder = sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin", "def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object): pass @pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not yet", "LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1}, {'fieldPath': '/time', 'group': 2},", "@pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not yet implemented') def test_configuration_on_record_error(sdc_builder, sdc_executor,", "- This is sample log message', 'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11", "k in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline):", "{'fieldPath': '/message', 'group': 6}] REGULAR_EXPRESSION = r'(\\S+) (\\S+) (\\S+) (\\S+)", "'/file', 'group': 5}, {'fieldPath': '/message', 'group': 6}] REGULAR_EXPRESSION = r'(\\S+)", "= {'bucket': aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*', 'number_of_threads': 1, 'read_order': 'LEXICOGRAPHICAL', 'data_format':", "{ 'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] ' '\"GET /apache.gif", "-0700] ' '\"GET /apache.gif HTTP/1.0\" 200 232', 'LOG4J': '200 [main]", "implemented') def test_configuration_connection_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented')", "@pytest.mark.skip('Not yet implemented') def test_configuration_bucket(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented')", "trash s3_origin >= pipeline_finisher_executor s3_origin_pipeline = builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] = False", "@pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 
'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1',", "= pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content) output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin,", "def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented')", "'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL', 'version': 'NS10.0'}, 'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300',", "' 'HTTP/1.1\" 500 17 ', 'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743", "= builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] = False return s3_origin_pipeline def delete_aws_objects(client, aws,", ">= pipeline_finisher_executor s3_origin_pipeline = builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] = False return s3_origin_pipeline", "'17', 'clientip': '10.185.248.71', 'verb': 'GET', 'httpversion': '1.1', 'rawrequest': None, 'timestamp':", "(\\S+) (.*)' # log to be written int the file", "Illegal URL.', 'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware '", "[INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'} # data to verify the output", "if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING': sdc_executor.stop_pipeline(pipeline) # cleaning up s3 bucket", "'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime', 'REGEX': '2019-04-30 08:23:53 AM [INFO]", "use_proxy): pass @aws('s3') @pytest.mark.parametrize('data_format', ['LOG']) @pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT',", "'auth': 'frank', 
'ident': '-', 'verb': 'GET', 'referrer': '\"http://www.example.com/strt.' 'html\"', 'response':", "S3 origin can parse different log format or not. A", "def test_configuration_required_fields(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_retry_count(sdc_builder, sdc_executor):", "'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not yet implemented') def test_configuration_region(sdc_builder, sdc_executor, region):", "= logging.getLogger(__name__) S3_SANDBOX_PREFIX = 'sandbox' LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group':", "'sandbox' LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1}, {'fieldPath': '/time', 'group':", "= { 'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main', 'category':", "test_configuration_tags(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet", ".S3 origin reads the log file and parse the same.", "'read_order': 'LEXICOGRAPHICAL', 'data_format': data_format, 'log_format': log_format, 'custom_log_format': '%h %l %u", "'extensions': {'cat': 'Realtime'}, 'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version': '<DSA", "reads the log file and parse the same. 
Pipeline for", "@pytest.mark.skip('Not yet implemented') def test_configuration_retry_count(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented')", "Micro', 'version': '<DSA version>'}, 'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf':", "@pytest.mark.skip('Not yet implemented') def test_configuration_content(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['COPY_OBJECT'])", "builder.add_stage('Amazon S3', type='origin') s3_origin.set_attributes(**attributes) trash = builder.add_stage('Trash') pipeline_finisher_executor = builder.add_stage('Pipeline", "sdc_executor, task): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented')", "data_format_content = { 'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] '", "I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] \"GET '", "denied by server configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp': 'Wed Oct 11 14:32:52", "'HTTP/1.1\" 500 17 ', 'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743 method=GET", "configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp': 'Wed Oct 11 14:32:52 2000', 'loglevel': 'error',", "yet implemented') def test_configuration_bucket(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def", "pipeline_finisher_executor \"\"\" if log_format == 'GROK': file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else:", "file and parse the same. Pipeline for the same- s3_origin", "'referrer': '\"http://www.example.com/strt.' 
'html\"', 'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1', 'httpversion':", "'request': 'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'}, 'signature': 'APPFW',", "[True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy',", "parse the same. Pipeline for the same- s3_origin >> trash", "test_configuration_object(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not yet implemented')", "'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300", "s3_origin >> trash s3_origin >= pipeline_finisher_executor \"\"\" if log_format ==", "'cefVersion': 0, 'name': 'APPFW_STARTURL', 'version': 'NS10.0'}, 'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth':", "data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content = data_format_content[log_format] client = aws.s3 s3_key =", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_required_fields(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet", "3}, {'fieldPath': '/info', 'group': 4}, {'fieldPath': '/file', 'group': 5}, {'fieldPath':", "data_format, 'log_format': log_format, 'custom_log_format': '%h %l %u [%t] \"%r\" %>s", "parse different log format or not. 
A log file is", "'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT", "'1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime':", "'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '\"Mozilla/4.08 [en] (Win98; I", "@pytest.mark.skip('Not yet implemented') def test_configuration_task(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('use_proxy', [False,", "s3 origin. get_data_to_verify_output = { 'LOG4J': {'severity': 'DEBUG', 'relativetime': '200',", "from streamsets.testframework.markers import aws, sdc_min_version from streamsets.testframework.utils import get_random_string logger", "file on s3 data_format_content = { 'COMMON_LOG_FORMAT': '127.0.0.1 - frank", "'RUNNING': sdc_executor.stop_pipeline(pipeline) # cleaning up s3 bucket delete_aws_objects(client, aws, s3_key)", "@pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy): pass", "'number_of_threads': 1, 'read_order': 'LEXICOGRAPHICAL', 'data_format': data_format, 'log_format': log_format, 'custom_log_format': '%h", "sdc_min_version from streamsets.testframework.utils import get_random_string logger = logging.getLogger(__name__) S3_SANDBOX_PREFIX =", "(\\S+) (\\S+) (\\S+) (\\S+) (.*)' # log to be written", "task, delete_original_object): pass @pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not yet implemented') def test_configuration_endpoint(sdc_builder,", "test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object): pass @pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not yet implemented')", "aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*', 'number_of_threads': 1, 'read_order': 'LEXICOGRAPHICAL', 'data_format': 
data_format, 'log_format':", "# data to verify the output of amazon s3 origin.", "sdc_executor, on_record_error): pass @pytest.mark.skip('Not yet implemented') def test_configuration_preconditions(sdc_builder, sdc_executor): pass", "# Build pipeline. builder = sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin = builder.add_stage('Amazon", "and parse the same. Pipeline for the same- s3_origin >>", "in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline): sdc_executor.add_pipeline(pipeline)", "same- s3_origin >> trash s3_origin >= pipeline_finisher_executor \"\"\" if log_format", "'GET', 'referrer': '\"http://www.example.com/strt.' 'html\"', 'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1',", "08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'} # data to verify", "sdc_executor, task, delete_original_object): pass @pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not yet implemented') def", "(Win98; I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] \"GET", "sample log message'}, 'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident': '-',", "def test_configuration_task(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('use_proxy', [False, True]) @pytest.mark.skip('Not yet", "'frank', 'ident': '-', 'verb': 'GET', 'referrer': '\"http://www.example.com/strt.' 
'html\"', 'response': '200',", "to be written int the file on s3 data_format_content =", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_socket_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT'])", "sdc_executor.stop_pipeline(pipeline) # cleaning up s3 bucket delete_aws_objects(client, aws, s3_key) def", "'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'}, 'CEF': {'severity':", "'/date', 'group': 1}, {'fieldPath': '/time', 'group': 2}, {'fieldPath': '/timehalf', 'group':", "the log file and parse the same. Pipeline for the", "@pytest.mark.parametrize('delete_original_object', [False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_delete_original_object(sdc_builder, sdc_executor, task,", ">= pipeline_finisher_executor \"\"\" if log_format == 'GROK': file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT']", "s3_key): # Clean up S3. delete_keys = {'Objects': [{'Key': k['Key']}", "execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline): sdc_executor.add_pipeline(pipeline) snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot output_records =", "'10.217.253.78', 'spt': '53743'}, 'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name':", "yet implemented') def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not", "pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy):", "yet implemented') def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy): pass @aws('s3') @pytest.mark.parametrize('data_format', ['LOG'])", "'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName': '-',", 
"f'{s3_key}/*', 'number_of_threads': 1, 'read_order': 'LEXICOGRAPHICAL', 'data_format': data_format, 'log_format': log_format, 'custom_log_format':", "implemented') def test_configuration_secret_access_key(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_socket_timeout(sdc_builder,", "I ;Nav)\"', 'auth': 'frank', 'ident': '-', 'verb': 'GET', 'referrer': '\"http://www.example.com/strt.'", "@pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy): pass", "test_configuration_content(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False, True]) @pytest.mark.skip('Not", "sample log message', 'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000] [error]", "sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_port(sdc_builder,", "cn1=241 msg=Realtime', 'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'}", "pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_tags(sdc_builder, sdc_executor, task):", "k['Key']} for k in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor,", "{'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j', 'ndc': 'unknown',", "pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False, True]) @pytest.mark.skip('Not yet implemented') def", "'GET', 'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT': 
{'message':", "'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client", "'product': 'Deep Security Agent', 'extensions': {'cat': 'Realtime'}, 'leefVersion': 2.0, 'vendor':", "{'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM', '/info': '[INFO]', '/message': 'Pipeline", "'data_format': data_format, 'log_format': log_format, 'custom_log_format': '%h %l %u [%t] \"%r\"", "['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_task(sdc_builder, sdc_executor, task):", "'Realtime'}, 'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version': '<DSA version>'}, 'REGEX':", "aws, s3_key): # Clean up S3. delete_keys = {'Objects': [{'Key':", "'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '\"Mozilla/4.08 [en] (Win98; I ;Nav)\"', 'auth':", "implemented') def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object): pass @pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not", "'ident': '-', 'verb': 'GET', 'referrer': '\"http://www.example.com/strt.' 'html\"', 'response': '200', 'bytes':", "task): pass @pytest.mark.parametrize('use_proxy', [False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_use_proxy(sdc_builder,", "s3 bucket mentioned below .S3 origin reads the log file", "test_configuration_region(sdc_builder, sdc_executor, region): pass @pytest.mark.skip('Not yet implemented') def test_configuration_required_fields(sdc_builder, sdc_executor):", "yet implemented') def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not", "@pytest.mark.skip('Not yet implemented') def test_configuration_endpoint(sdc_builder, sdc_executor, region): pass @pytest.mark.parametrize('task', ['COPY_OBJECT'])", "aws): # Build pipeline. 
builder = sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin =", "None, 'timestamp': '09/Jan/2015:9:12:06 +0000'}, 'LEEF': {'eventId': '4000030', 'product': 'Deep Security", "pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1',", "'log_format': log_format, 'custom_log_format': '%h %l %u [%t] \"%r\" %>s %b',", "message'}, 'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident': '-', 'response': '200',", "different log format or not. A log file is being", "', 'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal", "def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format): \"\"\"Check whether S3 origin", "Pipeline Filewriterpipeline53'} # data to verify the output of amazon", "pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"]) s3_origin >> trash s3_origin >= pipeline_finisher_executor s3_origin_pipeline", "s3 data_format_content = { 'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700]", "' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\" 500 17 ', 'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78", "'AM', '/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet", "'NS10.0'}, 'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-', 'ident': '-', 'response': '500',", "127.0.0.1] client ' 'denied by server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1 -", "- [09/Jan/2015:9:12:06 +0000] \"GET ' 
'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\" 500 17", "'-', 'verb': 'GET', 'referrer': '\"http://www.example.com/strt.' 'html\"', 'response': '200', 'bytes': '2326',", "'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING } pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws) s3_origin =", "test_configuration_new_object_path(sdc_builder, sdc_executor, task): pass @pytest.mark.skip('Not yet implemented') def test_configuration_object(sdc_builder, sdc_executor):", "not. A log file is being created in s3 bucket", "implemented') def test_configuration_access_key_id(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_bucket(sdc_builder,", "'127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'},", "sdc_executor): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_tags(sdc_builder, sdc_executor,", "log_format, 'custom_log_format': '%h %l %u [%t] \"%r\" %>s %b', 'regular_expression':", "[%t] \"%r\" %>s %b', 'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING } pipeline", "@pytest.mark.skip('Not yet implemented') def test_configuration_connection_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not", "'verb': 'GET', 'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT':", "'timestamp': '09/Jan/2015:9:12:06 +0000'}, 'LEEF': {'eventId': '4000030', 'product': 'Deep Security Agent',", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_secret_access_key(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet", "'clientip': '10.185.248.71', 'verb': 'GET', 'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06", 
"'[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet implemented') def test_configuration_access_key_id(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet", "'10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] \"GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 ' 'HTTP/1.1\"", "pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy):", "- frank [10/Oct/2000:13:55:36 -0700] ' '\"GET /apache.gif HTTP/1.0\" 200 232',", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_object(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('on_record_error', ['DISCARD',", "sdc_executor): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_host(sdc_builder, sdc_executor,", "'response': '200', 'bytes': '232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0',", "pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_content(sdc_builder, sdc_executor, task):", "sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_socket_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task',", "if log_format == 'GROK': file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content =", "desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime', 'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api]", "sdc_executor, task): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False, True]) @pytest.mark.skip('Not yet", "'auth': '-', 'ident': '-', 'response': '500', 'bytes': '17', 'clientip': '10.185.248.71',", "implemented') def test_configuration_region(sdc_builder, sdc_executor, region): pass @pytest.mark.skip('Not yet implemented') def", "'Deep Security Agent', 
'extensions': {'cat': 'Realtime'}, 'leefVersion': 2.0, 'vendor': 'Trend", "'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet implemented') def test_configuration_access_key_id(sdc_builder, sdc_executor):", "%l %u [%t] \"%r\" %>s %b', 'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING", "'LOG4J', 'CEF', 'LEEF']) def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format): \"\"\"Check", "pipeline) assert output_records[0].field == get_data_to_verify_output[log_format] finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING':", "This is sample log message', 'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52", "log message'}, 'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident': '-', 'response':", "'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.', 'request': 'http://vpx247.example.net/FFC/login.html', 'method':", "'2326', 'clientip': '127.0.0.1', 'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'},", "' 'denied by server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36", "def test_configuration_secret_access_key(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_socket_timeout(sdc_builder, sdc_executor):", "'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING } pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws)", "'/time', 'group': 2}, {'fieldPath': '/timehalf', 'group': 3}, {'fieldPath': '/info', 'group':", "'group': 2}, {'fieldPath': '/timehalf', 'group': 3}, {'fieldPath': '/info', 'group': 4},", "/apache.gif' ' HTTP/1.0\" 200 2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08' ' [en] (Win98;", "'timestamp': 'Wed Oct 11 14:32:52 2000', 'loglevel': 
'error', 'clientip': '127.0.0.1'},", "@pytest.mark.skip('Not yet implemented') def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object): pass @pytest.mark.parametrize('region',", "amazon s3 origin. get_data_to_verify_output = { 'LOG4J': {'severity': 'DEBUG', 'relativetime':", "def test_configuration_region(sdc_builder, sdc_executor, region): pass @pytest.mark.skip('Not yet implemented') def test_configuration_required_fields(sdc_builder,", "def test_configuration_tags(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not", "def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws): # Build pipeline. builder = sdc_builder.get_pipeline_builder()", "client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline): sdc_executor.add_pipeline(pipeline) snapshot = sdc_executor.capture_snapshot(pipeline,", "import aws, sdc_min_version from streamsets.testframework.utils import get_random_string logger = logging.getLogger(__name__)", "pipeline): sdc_executor.add_pipeline(pipeline) snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot output_records = snapshot[s3_origin].output return", "file is being created in s3 bucket mentioned below .S3", "request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.', 'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA", "'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM', '/info': '[INFO]', '/message':", "'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6", "'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] \"GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 '", "use_proxy): pass 
@pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_password(sdc_builder, sdc_executor,", "Body=file_content) output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline) assert output_records[0].field == get_data_to_verify_output[log_format]", "pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_task(sdc_builder,", "\"Mozilla/4.08' ' [en] (Win98; I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - -", "def test_configuration_access_key_id(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_bucket(sdc_builder, sdc_executor):", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_connection_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT'])", "= [{'fieldPath': '/date', 'group': 1}, {'fieldPath': '/time', 'group': 2}, {'fieldPath':", "def test_configuration_preconditions(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def", "@aws('s3') @pytest.mark.parametrize('data_format', ['LOG']) @pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK',", "(\\S+) (\\S+) (\\S+) (.*)' # log to be written int", "test_configuration_endpoint(sdc_builder, sdc_executor, region): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not yet implemented') def", "Illegal URL.', 'request': 'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'},", "['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1',", "int the file on s3 
data_format_content = { 'COMMON_LOG_FORMAT': '127.0.0.1", "'\"http://www.example.com/strt.' 'html\"', 'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1', 'httpversion': '1.0',", "'/timehalf', 'group': 3}, {'fieldPath': '/info', 'group': 4}, {'fieldPath': '/file', 'group':", "@pytest.mark.skip('Not yet implemented') def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True])", "yet implemented') def test_configuration_object(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR'])", "['LOG']) @pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT', 'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF',", "[False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy): pass", "'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-', 'ident': '-', 'response': '500', 'bytes': '17', 'clientip':", "[en] (Win98; I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000]", "sdc_executor, region): pass @pytest.mark.skip('Not yet implemented') def test_configuration_required_fields(sdc_builder, sdc_executor): pass", "get_random_string logger = logging.getLogger(__name__) S3_SANDBOX_PREFIX = 'sandbox' LOG_FIELD_MAPPING = [{'fieldPath':", "'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'} # data", "'08:23:53', '/date': '2019-04-30', '/timehalf': 'AM', '/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53',", "2.0, 'vendor': 'Trend Micro', 'version': '<DSA version>'}, 'REGEX': {'/time': '08:23:53',", "\"\"\" if log_format == 'GROK': file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content", "for k in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} 
client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor, s3_origin,", "'frank', 'ident': '-', 'response': '200', 'bytes': '232', 'clientip': '127.0.0.1', 'verb':", "log message', 'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000] [error] [client", "'This is sample log message'}, 'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank',", "logging.getLogger(__name__) S3_SANDBOX_PREFIX = 'sandbox' LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1},", "'request': 'GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent':", "import logging import pytest from streamsets.testframework.markers import aws, sdc_min_version from", "'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version': '<DSA version>'}, 'REGEX': {'/time':", "origin. get_data_to_verify_output = { 'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread':", "s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws): # Build pipeline. 
builder =", "frank [10/Oct/2000:13:55:36 -0700] \"GET /apache.gif' ' HTTP/1.0\" 200 2326 \"http://www.example.com/strt.html\"", "'group': 6}] REGULAR_EXPRESSION = r'(\\S+) (\\S+) (\\S+) (\\S+) (\\S+) (.*)'", "URL.', 'request': 'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'}, 'signature':", "'category': 'org.StreamSets.Log4j', 'ndc': 'unknown', 'message': 'This is sample log message'},", "'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by", "pass @pytest.mark.skip('Not yet implemented') def test_configuration_bucket(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet", "' 'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.', 'LEEF': 'LEEF: 2.0|Trend", "log to be written int the file on s3 data_format_content", "implemented') def test_configuration_task(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('use_proxy', [False, True]) @pytest.mark.skip('Not", "@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_task(sdc_builder, sdc_executor,", "finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING': sdc_executor.stop_pipeline(pipeline) # cleaning up s3", ">> trash s3_origin >= pipeline_finisher_executor s3_origin_pipeline = builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] =", "task): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def", "get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws) s3_origin = pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content)", ";Nav)\"', 'auth': 'frank', 'ident': '-', 'verb': 
'GET', 'referrer': '\"http://www.example.com/strt.' 'html\"',", "['OTHER']) @pytest.mark.skip('Not yet implemented') def test_configuration_endpoint(sdc_builder, sdc_executor, region): pass @pytest.mark.parametrize('task',", "origin reads the log file and parse the same. Pipeline", "use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_port(sdc_builder, sdc_executor,", "'10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.', 'LEEF':", "'-', 'response': '500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET', 'httpversion':", "delete_aws_objects(client, aws, s3_key): # Clean up S3. delete_keys = {'Objects':", "below .S3 origin reads the log file and parse the", "task): pass @pytest.mark.skip('Not yet implemented') def test_configuration_object(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('on_record_error',", "'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown - This is sample", "data to verify the output of amazon s3 origin. 
get_data_to_verify_output", "'org.StreamSets.Log4j', 'ndc': 'unknown', 'message': 'This is sample log message'}, 'COMMON_LOG_FORMAT':", "@pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy): pass", "-0700'}, 'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp':", "HTTP/1.0\" 200 2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08' ' [en] (Win98; I ;Nav)\"',", "Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline): sdc_executor.add_pipeline(pipeline) snapshot =", "assert output_records[0].field == get_data_to_verify_output[log_format] finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING': sdc_executor.stop_pipeline(pipeline)", "yet implemented') def test_configuration_preconditions(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet", "%u [%t] \"%r\" %>s %b', 'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING }", "'no-more-data'}\"]) s3_origin >> trash s3_origin >= pipeline_finisher_executor s3_origin_pipeline = builder.build().configure_for_environment(aws)", "[{'fieldPath': '/date', 'group': 1}, {'fieldPath': '/time', 'group': 2}, {'fieldPath': '/timehalf',", "[10/Oct/2000:13:55:36 -0700] ' '\"GET /apache.gif HTTP/1.0\" 200 232', 'LOG4J': '200", "'200 [main] DEBUG org.StreamSets.Log4j unknown - This is sample log", "log_format == 'GROK': file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT'] else: file_content = data_format_content[log_format]", "[True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy',", "cleaning 
up s3 bucket delete_aws_objects(client, aws, s3_key) def get_aws_origin_to_trash_pipeline(sdc_builder, attributes,", "test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def", "S3. delete_keys = {'Objects': [{'Key': k['Key']} for k in client.list_objects_v2(Bucket=aws.s3_bucket_name,", "aws, data_format, log_format): \"\"\"Check whether S3 origin can parse different", "= f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes = {'bucket': aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*', 'number_of_threads': 1,", "pytest from streamsets.testframework.markers import aws, sdc_min_version from streamsets.testframework.utils import get_random_string", "def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline): sdc_executor.add_pipeline(pipeline) snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot output_records", "org.StreamSets.Log4j unknown - This is sample log message', 'APACHE_ERROR_LOG_FORMAT': '[Wed", "data_format, log_format): \"\"\"Check whether S3 origin can parse different log", "# log to be written int the file on s3", "test_configuration_secret_access_key(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_socket_timeout(sdc_builder, sdc_executor): pass", "server configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache.gif'", "17 ', 'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow", "'GET', 'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'}, 'LEEF': {'eventId':", "'GROK', 'LOG4J', 'CEF', 'LEEF']) def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format):", "{'msg': 'Disallow Illegal 
URL.', 'request': 'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78',", "def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented')", "Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime',", "{'Objects': [{'Key': k['Key']} for k in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys)", "aws, sdc_min_version from streamsets.testframework.utils import get_random_string logger = logging.getLogger(__name__) S3_SANDBOX_PREFIX", "'/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet implemented')", "the same- s3_origin >> trash s3_origin >= pipeline_finisher_executor \"\"\" if", "pass @pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not yet implemented') def test_configuration_on_record_error(sdc_builder,", "Pipeline for the same- s3_origin >> trash s3_origin >= pipeline_finisher_executor", "output of amazon s3 origin. 
get_data_to_verify_output = { 'LOG4J': {'severity':", "sdc_executor): pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_content(sdc_builder, sdc_executor,", "Agent|<DSA version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime', 'REGEX': '2019-04-30", "'200', 'bytes': '232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 'rawrequest':", "sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_connection_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task',", "500 17 ', 'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html", "(.*)' # log to be written int the file on", "'/info', 'group': 4}, {'fieldPath': '/file', 'group': 5}, {'fieldPath': '/message', 'group':", "'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_task(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('use_proxy',", "yet implemented') def test_configuration_content(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object',", "can parse different log format or not. 
A log file", "None, 'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000',", "'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] ' '\"GET /apache.gif HTTP/1.0\"", "'/timehalf': 'AM', '/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not", "'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not yet implemented') def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error): pass", "'4000030', 'product': 'Deep Security Agent', 'extensions': {'cat': 'Realtime'}, 'leefVersion': 2.0,", "'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not yet implemented')", "'US_WEST_2']) @pytest.mark.skip('Not yet implemented') def test_configuration_region(sdc_builder, sdc_executor, region): pass @pytest.mark.skip('Not", "2000', 'loglevel': 'error', 'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '\"Mozilla/4.08", "s3_origin.set_attributes(**attributes) trash = builder.add_stage('Trash') pipeline_finisher_executor = builder.add_stage('Pipeline Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType()", "client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline): sdc_executor.add_pipeline(pipeline) snapshot", "@pytest.mark.skip('Not yet implemented') def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy): pass @aws('s3') @pytest.mark.parametrize('data_format',", "def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1',", "'53743'}, 'signature': 
'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL', 'version':", "or not. A log file is being created in s3", "@pytest.mark.skip('Not yet implemented') def test_configuration_tags(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT',", "'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1', 'httpversion': '1.0', 'rawrequest': None,", "task): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False, True]) @pytest.mark.skip('Not yet implemented')", "1}, {'fieldPath': '/time', 'group': 2}, {'fieldPath': '/timehalf', 'group': 3}, {'fieldPath':", "'TO_ERROR']) @pytest.mark.skip('Not yet implemented') def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error): pass @pytest.mark.skip('Not", "= builder.add_stage('Pipeline Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"]) s3_origin >> trash", "'bytes': '232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 'rawrequest': None,", "[{'Key': k['Key']} for k in client.list_objects_v2(Bucket=aws.s3_bucket_name, Prefix=s3_key)['Contents']]} client.delete_objects(Bucket=aws.s3_bucket_name, Delete=delete_keys) def", "implemented') def test_configuration_preconditions(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented')", "-0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET '", "'-', 'response': '200', 'bytes': '232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion':", "{'cat': 'Realtime'}, 'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version': '<DSA version>'},", "def test_configuration_new_object_path(sdc_builder, sdc_executor, task): pass @pytest.mark.skip('Not yet implemented') def test_configuration_object(sdc_builder,", "mentioned below .S3 
origin reads the log file and parse", "attributes = {'bucket': aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*', 'number_of_threads': 1, 'read_order': 'LEXICOGRAPHICAL',", "'127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] ' '\"GET /apache.gif HTTP/1.0\" 200", "pipeline. builder = sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin = builder.add_stage('Amazon S3', type='origin')", "trash s3_origin >= pipeline_finisher_executor \"\"\" if log_format == 'GROK': file_content", "'127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache.gif' ' HTTP/1.0\" 200", "sdc_executor, use_proxy): pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1',", "configuration:/export/home/live/ap/htdocs/test', 'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache.gif' '", "'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not yet implemented') def test_configuration_region(sdc_builder, sdc_executor, region): pass", "%>s %b', 'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING } pipeline = get_aws_origin_to_trash_pipeline(sdc_builder,", "msg=Realtime', 'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'} #", "== get_data_to_verify_output[log_format] finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING': sdc_executor.stop_pipeline(pipeline) # cleaning", "get_data_to_verify_output = { 'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main',", "@pytest.mark.skip('Not yet implemented') def test_configuration_socket_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not", "'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 ' 'spt=53743 method=GET 
request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.',", "pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy):", "implemented') def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet", "created in s3 bucket mentioned below .S3 origin reads the", "'10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET", "@pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_new_object_path(sdc_builder, sdc_executor, task): pass", "'status': '500'}, 'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow", "'\"GET /apache.gif HTTP/1.0\" 200 232', 'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j", "'group': 3}, {'fieldPath': '/info', 'group': 4}, {'fieldPath': '/file', 'group': 5},", "for the same- s3_origin >> trash s3_origin >= pipeline_finisher_executor \"\"\"", "'verb': 'GET', 'referrer': '\"http://www.example.com/strt.' 
'html\"', 'response': '200', 'bytes': '2326', 'clientip':", "Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet implemented') def test_configuration_access_key_id(sdc_builder, sdc_executor): pass", "%b', 'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING } pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes,", "yet implemented') def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2',", "HTTP/1.0\" 200 232', 'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown -", "version>'}, 'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM', '/info': '[INFO]',", "== 'RUNNING': sdc_executor.stop_pipeline(pipeline) # cleaning up s3 bucket delete_aws_objects(client, aws,", "implemented') def test_configuration_endpoint(sdc_builder, sdc_executor, region): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not yet", "test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def", "'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1',", "'version': '<DSA version>'}, 'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM',", "'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER',", "sdc_executor): pass @pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not yet implemented') def", "the file on s3 data_format_content = { 'COMMON_LOG_FORMAT': '127.0.0.1 -", "'auth': 'frank', 'ident': '-', 'response': '200', 'bytes': '232', 'clientip': '127.0.0.1',", "'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 
'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36", "'bytesSent': '17', 'status': '500'}, 'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions':", "test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def", "implemented') def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet", "pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content) output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline)", "'/message', 'group': 6}] REGULAR_EXPRESSION = r'(\\S+) (\\S+) (\\S+) (\\S+) (\\S+)", "'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.', 'request': 'http://vpx247.example.net/FFC/login.html', 'method': 'GET',", "test_configuration_connection_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_content(sdc_builder,", "'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1',", "def test_configuration_content(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.parametrize('delete_original_object', [False, True])", "up S3. 
delete_keys = {'Objects': [{'Key': k['Key']} for k in", "delete_original_object): pass @pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not yet implemented') def test_configuration_endpoint(sdc_builder, sdc_executor,", "14:32:52 2000', 'loglevel': 'error', 'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent':", "test_configuration_task(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('use_proxy', [False, True]) @pytest.mark.skip('Not yet implemented')", "builder.add_error_stage('Discard') s3_origin = builder.add_stage('Amazon S3', type='origin') s3_origin.set_attributes(**attributes) trash = builder.add_stage('Trash')", "'timestamp': '10/Oct/2000:13:55:36 -0700'}, 'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request':", "implemented') def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error): pass @pytest.mark.skip('Not yet implemented') def", "+0000', 'request': 'GET ' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName': '-', 'remoteHost': '10.185.248.71',", "\"\"\"Check whether S3 origin can parse different log format or", "'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not yet implemented') def", "'10/Oct/2000:13:55:36 -0700'}, 'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server configuration:/export/home/live/ap/htdocs/' 'test',", "' '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status':", "= r'(\\S+) (\\S+) (\\S+) (\\S+) (\\S+) (.*)' # log to", "'error', 'clientip': '127.0.0.1'}, 'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '\"Mozilla/4.08 [en] (Win98;", "log file is being created in s3 bucket mentioned below", "yet implemented') def test_configuration_socket_timeout(sdc_builder, sdc_executor): pass 
@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet", "Key=f'{s3_key}/{get_random_string()}.log', Body=file_content) output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline) assert output_records[0].field ==", "'-', 'ident': '-', 'response': '500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb':", "@pytest.mark.skip('Not yet implemented') def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1',", "['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_tags(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('task',", "implemented') def test_configuration_socket_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet implemented')", "sdc_builder, aws, data_format, log_format): \"\"\"Check whether S3 origin can parse", "'200', 'bytes': '2326', 'clientip': '127.0.0.1', 'httpversion': '1.0', 'rawrequest': None, 'timestamp':", "implemented') def test_configuration_retry_count(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_secret_access_key(sdc_builder,", ">> trash s3_origin >= pipeline_finisher_executor \"\"\" if log_format == 'GROK':", "'10.185.248.71', 'verb': 'GET', 'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'},", "'6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.', 'request': 'http://vpx247.example.net/FFC/login.html',", "['COPY_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_new_object_path(sdc_builder, sdc_executor, task): pass @pytest.mark.skip('Not", "Filewriterpipeline53'} # data to verify the output of amazon s3", "[streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'} # data to verify the output of", "implemented') def test_configuration_object(sdc_builder, 
sdc_executor): pass @pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR']) @pytest.mark.skip('Not", "'500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET', 'httpversion': '1.1', 'rawrequest':", "'GET', 'src': '10.217.253.78', 'spt': '53743'}, 'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion':", "200 2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08' ' [en] (Win98; I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT':", "'version': 'NS10.0'}, 'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-', 'ident': '-', 'response':", "'spt': '53743'}, 'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL',", "test_configuration_bucket(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_connection_timeout(sdc_builder, sdc_executor): pass", "[main] DEBUG org.StreamSets.Log4j unknown - This is sample log message',", "{'fieldPath': '/time', 'group': 2}, {'fieldPath': '/timehalf', 'group': 3}, {'fieldPath': '/info',", "pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_new_object_path(sdc_builder, sdc_executor, task):", "'1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'}, 'LEEF': {'eventId': '4000030', 'product':", "'html\"', 'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1', 'httpversion': '1.0', 'rawrequest':", "'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2']) @pytest.mark.skip('Not", "use_proxy): pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_user(sdc_builder, sdc_executor,", "s3_origin_pipeline = builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] = False return s3_origin_pipeline def delete_aws_objects(client,", 
"test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format): \"\"\"Check whether S3 origin can", "s3_origin >> trash s3_origin >= pipeline_finisher_executor s3_origin_pipeline = builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry']", "by server configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp': 'Wed Oct 11 14:32:52 2000',", "'[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet implemented') def", "{'request': '/apache.gif', 'auth': 'frank', 'ident': '-', 'response': '200', 'bytes': '232',", "= get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws) s3_origin = pipeline.origin_stage try: client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log',", "output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline) assert output_records[0].field == get_data_to_verify_output[log_format] finally:", "'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.', 'LEEF': 'LEEF: 2.0|Trend Micro|Deep", "s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes = {'bucket': aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*', 'number_of_threads':", "'200', 'thread': 'main', 'category': 'org.StreamSets.Log4j', 'ndc': 'unknown', 'message': 'This is", "'/file': '[streamsets.sdk.sdc_api]'}} @pytest.mark.skip('Not yet implemented') def test_configuration_access_key_id(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not", "def test_configuration_connection_timeout(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def", "- frank [10/Oct/2000:13:55:36 -0700] \"GET /apache.gif' ' HTTP/1.0\" 200 2326", "{'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-', 'ident': '-', 'response': '500', 'bytes': '17',", 
"@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_tags(sdc_builder, sdc_executor, task): pass", "logger = logging.getLogger(__name__) S3_SANDBOX_PREFIX = 'sandbox' LOG_FIELD_MAPPING = [{'fieldPath': '/date',", "[True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('region',", "yet implemented') def test_configuration_task(sdc_builder, sdc_executor, task): pass @pytest.mark.parametrize('use_proxy', [False, True])", "REGULAR_EXPRESSION = r'(\\S+) (\\S+) (\\S+) (\\S+) (\\S+) (.*)' # log", "Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"]) s3_origin >> trash s3_origin >= pipeline_finisher_executor", "pass @pytest.mark.parametrize('use_proxy', [True]) @pytest.mark.skip('Not yet implemented') def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy):", "sdc_executor): pass @pytest.mark.skip('Not yet implemented') def test_configuration_bucket(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not", "'unknown', 'message': 'This is sample log message'}, 'COMMON_LOG_FORMAT': {'request': '/apache.gif',", "pass @pytest.mark.parametrize('region', ['OTHER']) @pytest.mark.skip('Not yet implemented') def test_configuration_endpoint(sdc_builder, sdc_executor, region):", "{'fieldPath': '/info', 'group': 4}, {'fieldPath': '/file', 'group': 5}, {'fieldPath': '/message',", "\"http://www.example.com/strt.html\" \"Mozilla/4.08' ' [en] (Win98; I ;Nav)\"', 'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 -", "5}, {'fieldPath': '/message', 'group': 6}] REGULAR_EXPRESSION = r'(\\S+) (\\S+) (\\S+)", "to verify the output of amazon s3 origin. 
get_data_to_verify_output =", "'ident': '-', 'response': '200', 'bytes': '232', 'clientip': '127.0.0.1', 'verb': 'GET',", "{'fieldPath': '/file', 'group': 5}, {'fieldPath': '/message', 'group': 6}] REGULAR_EXPRESSION =", "' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime', 'REGEX': '2019-04-30 08:23:53 AM", "'17', 'status': '500'}, 'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions': {'msg':", "f'{S3_SANDBOX_PREFIX}/{get_random_string()}' attributes = {'bucket': aws.s3_bucket_name, 'prefix_pattern': f'{s3_key}/*', 'number_of_threads': 1, 'read_order':", "builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] = False return s3_origin_pipeline def delete_aws_objects(client, aws, s3_key):", "'232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 'rawrequest': None, 'timestamp':", "output_records[0].field == get_data_to_verify_output[log_format] finally: if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING': sdc_executor.stop_pipeline(pipeline) #", "'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL', 'version': 'NS10.0'}, 'GROK':", "yet implemented') def test_configuration_region(sdc_builder, sdc_executor, region): pass @pytest.mark.skip('Not yet implemented')", "implemented') def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy): pass @pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3',", "\"%r\" %>s %b', 'regular_expression': REGULAR_EXPRESSION, 'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING } pipeline =", "@pytest.mark.skip('Not yet implemented') def test_configuration_secret_access_key(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented')", "sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard') s3_origin = builder.add_stage('Amazon S3', type='origin') s3_origin.set_attributes(**attributes) trash =", "+0000'}, 'LEEF': {'eventId': 
'4000030', 'product': 'Deep Security Agent', 'extensions': {'cat':", "streamsets.testframework.utils import get_random_string logger = logging.getLogger(__name__) S3_SANDBOX_PREFIX = 'sandbox' LOG_FIELD_MAPPING", "'client denied by server configuration:/export/home/live/ap/htdocs/' 'test', 'timestamp': 'Wed Oct 11", "True]) @pytest.mark.skip('Not yet implemented') def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy): pass @aws('s3')", "the output of amazon s3 origin. get_data_to_verify_output = { 'LOG4J':", "@pytest.mark.skip('Not yet implemented') def test_configuration_object(sdc_builder, sdc_executor): pass @pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE',", "'thread': 'main', 'category': 'org.StreamSets.Log4j', 'ndc': 'unknown', 'message': 'This is sample", "sdc_executor, task): pass @pytest.mark.skip('Not yet implemented') def test_configuration_object(sdc_builder, sdc_executor): pass", "Finisher Executor') pipeline_finisher_executor.set_attributes(stage_record_preconditions=[\"${record:eventType() == 'no-more-data'}\"]) s3_origin >> trash s3_origin >=", "{'request': '/apache.gif', 'agent': '\"Mozilla/4.08 [en] (Win98; I ;Nav)\"', 'auth': 'frank',", "sdc_executor, use_proxy): pass @aws('s3') @pytest.mark.parametrize('data_format', ['LOG']) @pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT',", "s3_origin, pipeline): sdc_executor.add_pipeline(pipeline) snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot output_records = snapshot[s3_origin].output", "pass @pytest.mark.parametrize('use_proxy', [False, True]) @pytest.mark.skip('Not yet implemented') def test_configuration_use_proxy(sdc_builder, sdc_executor,", "@pytest.mark.skip('Not yet implemented') def test_configuration_required_fields(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented')", "'09/Jan/2015:9:12:06 +0000', 'request': 'GET ' 
'/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1', 'logName': '-', 'remoteHost':", "'extensions': {'msg': 'Disallow Illegal URL.', 'request': 'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src':", "'custom_log_format': '%h %l %u [%t] \"%r\" %>s %b', 'regular_expression': REGULAR_EXPRESSION,", "Agent', 'extensions': {'cat': 'Realtime'}, 'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version':", "[10/Oct/2000:13:55:36 -0700] \"GET /apache.gif' ' HTTP/1.0\" 200 2326 \"http://www.example.com/strt.html\" \"Mozilla/4.08'", "'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1',", "get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws): # Build pipeline. builder = sdc_builder.get_pipeline_builder() builder.add_error_stage('Discard')", "sdc_executor, region): pass @pytest.mark.parametrize('task', ['COPY_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_new_object_path(sdc_builder,", "yet implemented') def test_configuration_retry_count(sdc_builder, sdc_executor): pass @pytest.mark.skip('Not yet implemented') def", "'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-', 'ident': '-', 'response': '500', 'bytes':", "'COPY_OBJECT', 'CREATE_NEW_OBJECT']) @pytest.mark.skip('Not yet implemented') def test_configuration_task(sdc_builder, sdc_executor, task): pass", "S3_SANDBOX_PREFIX = 'sandbox' LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1}, {'fieldPath':", "6}] REGULAR_EXPRESSION = r'(\\S+) (\\S+) (\\S+) (\\S+) (\\S+) (.*)' #", "msg=Disallow Illegal URL.', 'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware", "11 14:32:52 2000] [error] [client 127.0.0.1] client ' 'denied by", "@pytest.mark.skip('Not yet implemented') def test_configuration_access_key_id(sdc_builder, sdc_executor): pass 
@pytest.mark.skip('Not yet implemented')", "'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2',", "Security Agent|<DSA version>|4000030|cat=Anti-Malware ' 'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime', 'REGEX':", "s3_origin >= pipeline_finisher_executor s3_origin_pipeline = builder.build().configure_for_environment(aws) s3_origin_pipeline.configuration['shouldRetry'] = False return" ]
[ "linear regression Attributes ---------- _linear_regression : LinearRegression The actual scikt", "2019 <NAME> Copyright 2019 <NAME> Licensed under the Apache License,", "experiment configuration regression_inputs: RegressionInputs The input of the regression problem", "estimated values for a give set of data \"\"\" def", "configuration for linear regression Attributes ---------- _linear_regression : LinearRegression The", "as svm import model_building.experiment_configuration as ec class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class", "ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata) self._logger.debug(\"Model built\") # for idx,", "give set of data \"\"\" def __init__(self, campaign_configuration, hyperparameters, regression_inputs,", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", "model_building.experiment_configuration as ec class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class representing a single", "def _train(self): \"\"\" Build the model with the experiment configuration", "experiment configuration represented by this object \"\"\" self._logger.debug(\"Building model for", "self._regression_inputs xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata) self._logger.debug(\"Model built\") #", "for runs in rows \"\"\" xdata, _ = self._regression_inputs.get_xy_data(rows) return", "command line and campaign configuration files hyperparameters: dictionary The set", "the signature associated with this experiment configuration \"\"\" signature =", "hyperparameters, regression_inputs, prefix) self.technique = ec.Technique.SVR self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'],", "software distributed under the License is distributed 
on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "signature.append(\"gamma_\" + str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" + str(self._hyperparameters['kernel'])) signature.append(\"degree_\" + str(self._hyperparameters['degree'])) return", "line and campaign configuration files hyperparameters: dictionary The set of", "be solved \"\"\" super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix) self.technique = ec.Technique.SVR", "dict: The set of options specified by the user though", "= svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def _compute_signature(self, prefix): \"\"\"", "specified by the user though command line and campaign configuration", "xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata) self._logger.debug(\"Model built\") # for", "# for idx, col_name in enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The coefficient for", "set of hyperparameters of this experiment configuration regression_inputs: RegressionInputs The", "%s\", self._signature) assert self._regression_inputs xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata)", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "hyperparameters: dictionary The set of hyperparameters of this experiment configuration", "gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def _compute_signature(self, prefix): \"\"\" Compute the signature", "Version 2.0 (the \"License\"); you may not use this file", "<NAME> Copyright 2019 <NAME> Licensed under the Apache License, Version", "coefficient for %s is %f\", col_name, self._linear_regression.coef_[idx]) def compute_estimations(self, rows):", "%f\", col_name, self._linear_regression.coef_[idx]) def compute_estimations(self, rows): \"\"\" Compute the estimations", "writing, software distributed under the License is distributed on an", "of hyperparameters of this experiment configuration regression_inputs: RegressionInputs The input", "the MAPE for runs in rows \"\"\" xdata, _ =", "dict of dict: The set of options specified by the", "object \"\"\" self._logger.debug(\"Building model for %s\", self._signature) assert self._regression_inputs xdata,", "= self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata) self._logger.debug(\"Model built\") # for idx, col_name", ": LinearRegression The actual scikt object which performs the linear", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "Apache License, Version 2.0 (the \"License\"); you may not use", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "express or implied. 
See the License for the specific language", "LinearRegression The actual scikt object which performs the linear regression", "sklearn.svm as svm import model_building.experiment_configuration as ec class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\"", "of this experiment configuration regression_inputs: RegressionInputs The input of the", "of the linear model compute_estimations() Compute the estimated values for", "+ str(self._hyperparameters['kernel'])) signature.append(\"degree_\" + str(self._hyperparameters['degree'])) return signature def _train(self): \"\"\"", "regression_inputs, prefix): \"\"\" campaign_configuration: dict of dict: The set of", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "Compute the estimated values for a give set of data", "regression Methods ------- _train() Performs the actual building of the", "in compliance with the License. You may obtain a copy", "str(self._hyperparameters['kernel'])) signature.append(\"degree_\" + str(self._hyperparameters['degree'])) return signature def _train(self): \"\"\" Build", "a single experiment configuration for linear regression Attributes ---------- _linear_regression", "_train(self): \"\"\" Build the model with the experiment configuration represented", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "you may not use this file except in compliance with", "ydata) self._logger.debug(\"Model built\") # for idx, col_name in enumerate(self._regression_inputs.x_columns): #", "in enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The coefficient for %s is %f\", col_name,", "_linear_regression : LinearRegression The actual scikt object which performs the", "limitations under the License. \"\"\" import sklearn.svm as svm import", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "the License. 
You may obtain a copy of the License", "agreed to in writing, software distributed under the License is", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def _compute_signature(self, prefix): \"\"\" Compute", "RegressionInputs The input of the regression problem to be solved", "self.technique = ec.Technique.SVR self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree'])", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "\"\"\" Class representing a single experiment configuration for linear regression", "use this file except in compliance with the License. You", "this experiment configuration \"\"\" signature = prefix.copy() signature.append(\"C_\" + str(self._hyperparameters['C']))", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "and campaign configuration files hyperparameters: dictionary The set of hyperparameters", "ANY KIND, either express or implied. 
See the License for", "solved \"\"\" super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix) self.technique = ec.Technique.SVR self._regressor", "svm import model_building.experiment_configuration as ec class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class representing", "Class representing a single experiment configuration for linear regression Attributes", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "Performs the actual building of the linear model compute_estimations() Compute", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "return signature def _train(self): \"\"\" Build the model with the", "hyperparameters, regression_inputs, prefix): \"\"\" campaign_configuration: dict of dict: The set", "model with the experiment configuration represented by this object \"\"\"", "under the License. \"\"\" import sklearn.svm as svm import model_building.experiment_configuration", "model for %s\", self._signature) assert self._regression_inputs xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"])", "= ec.Technique.SVR self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def", "configuration regression_inputs: RegressionInputs The input of the regression problem to", "linear model compute_estimations() Compute the estimated values for a give", "runs in rows \"\"\" xdata, _ = self._regression_inputs.get_xy_data(rows) return self._regressor.predict(xdata)", "compute_estimations() Compute the estimated values for a give set of", "either express or implied. See the License for the specific", "and limitations under the License. 
\"\"\" import sklearn.svm as svm", "a give set of data \"\"\" def __init__(self, campaign_configuration, hyperparameters,", "regression Attributes ---------- _linear_regression : LinearRegression The actual scikt object", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "with this experiment configuration \"\"\" signature = prefix.copy() signature.append(\"C_\" +", "signature = prefix.copy() signature.append(\"C_\" + str(self._hyperparameters['C'])) signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\"", "self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def _compute_signature(self, prefix):", "under the License is distributed on an \"AS IS\" BASIS,", "\"License\"); you may not use this file except in compliance", "user though command line and campaign configuration files hyperparameters: dictionary", "MAPE for runs in rows \"\"\" xdata, _ = self._regression_inputs.get_xy_data(rows)", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "def __init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix): \"\"\" campaign_configuration: dict of", "self._logger.debug(\"Building model for %s\", self._signature) assert self._regression_inputs xdata, ydata =", "estimations and the MAPE for runs in rows \"\"\" xdata,", "with the License. 
You may obtain a copy of the", "regression_inputs: RegressionInputs The input of the regression problem to be", "The input of the regression problem to be solved \"\"\"", "represented by this object \"\"\" self._logger.debug(\"Building model for %s\", self._signature)", "self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata) self._logger.debug(\"Model built\") # for idx, col_name in", "actual scikt object which performs the linear regression Methods -------", "+ str(self._hyperparameters['degree'])) return signature def _train(self): \"\"\" Build the model", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "hyperparameters of this experiment configuration regression_inputs: RegressionInputs The input of", "of dict: The set of options specified by the user", "campaign configuration files hyperparameters: dictionary The set of hyperparameters of", "Compute the signature associated with this experiment configuration \"\"\" signature", "License for the specific language governing permissions and limitations under", "scikt object which performs the linear regression Methods ------- _train()", "data \"\"\" def __init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix): \"\"\" campaign_configuration:", "+ str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" + str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" + str(self._hyperparameters['kernel'])) signature.append(\"degree_\" +", "linear regression Methods ------- _train() Performs the actual building of", "\"\"\" def __init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix): \"\"\" campaign_configuration: dict", "is %f\", col_name, self._linear_regression.coef_[idx]) def compute_estimations(self, rows): \"\"\" Compute the", "epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], 
kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def _compute_signature(self, prefix): \"\"\" Compute the", "Copyright 2019 <NAME> Licensed under the Apache License, Version 2.0", "this file except in compliance with the License. You may", "ec.Technique.SVR self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def _compute_signature(self,", "---------- _linear_regression : LinearRegression The actual scikt object which performs", "idx, col_name in enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The coefficient for %s is", "specific language governing permissions and limitations under the License. \"\"\"", "files hyperparameters: dictionary The set of hyperparameters of this experiment", "(the \"License\"); you may not use this file except in", "the regression problem to be solved \"\"\" super().__init__(campaign_configuration, hyperparameters, regression_inputs,", "object which performs the linear regression Methods ------- _train() Performs", "self._linear_regression.coef_[idx]) def compute_estimations(self, rows): \"\"\" Compute the estimations and the", "governing permissions and limitations under the License. 
\"\"\" import sklearn.svm", "_compute_signature(self, prefix): \"\"\" Compute the signature associated with this experiment", "options specified by the user though command line and campaign", "prefix.copy() signature.append(\"C_\" + str(self._hyperparameters['C'])) signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" + str(self._hyperparameters['gamma']))", "str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" + str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" + str(self._hyperparameters['kernel'])) signature.append(\"degree_\" + str(self._hyperparameters['degree']))", "def _compute_signature(self, prefix): \"\"\" Compute the signature associated with this", "experiment configuration for linear regression Attributes ---------- _linear_regression : LinearRegression", "the linear model compute_estimations() Compute the estimated values for a", "applicable law or agreed to in writing, software distributed under", "\"\"\" self._logger.debug(\"Building model for %s\", self._signature) assert self._regression_inputs xdata, ydata", "rows): \"\"\" Compute the estimations and the MAPE for runs", "though command line and campaign configuration files hyperparameters: dictionary The", "with the experiment configuration represented by this object \"\"\" self._logger.debug(\"Building", "campaign_configuration: dict of dict: The set of options specified by", "class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class representing a single experiment configuration for", "campaign_configuration, hyperparameters, regression_inputs, prefix): \"\"\" campaign_configuration: dict of dict: The", "\"\"\" Compute the signature associated with this experiment configuration \"\"\"", "%s is %f\", col_name, self._linear_regression.coef_[idx]) def compute_estimations(self, rows): \"\"\" Compute", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "# self._logger.debug(\"The coefficient for %s 
is %f\", col_name, self._linear_regression.coef_[idx]) def", "values for a give set of data \"\"\" def __init__(self,", "prefix): \"\"\" campaign_configuration: dict of dict: The set of options", "col_name, self._linear_regression.coef_[idx]) def compute_estimations(self, rows): \"\"\" Compute the estimations and", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "the specific language governing permissions and limitations under the License.", "License. \"\"\" import sklearn.svm as svm import model_building.experiment_configuration as ec", "Compute the estimations and the MAPE for runs in rows", "Attributes ---------- _linear_regression : LinearRegression The actual scikt object which", "\"\"\" import sklearn.svm as svm import model_building.experiment_configuration as ec class", "building of the linear model compute_estimations() Compute the estimated values", "set of options specified by the user though command line", "super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix) self.technique = ec.Technique.SVR self._regressor = svm.SVR(C=self._hyperparameters['C'],", "degree=self._hyperparameters['degree']) def _compute_signature(self, prefix): \"\"\" Compute the signature associated with", "and the MAPE for runs in rows \"\"\" xdata, _", "the model with the experiment configuration represented by this object", "signature def _train(self): \"\"\" Build the model with the experiment", "kernel=self._hyperparameters['kernel'], degree=self._hyperparameters['degree']) def _compute_signature(self, prefix): \"\"\" Compute the signature associated", "the Apache License, Version 2.0 (the \"License\"); you may not", "file except in compliance with the License. You may obtain", "\"\"\" Copyright 2019 <NAME> Copyright 2019 <NAME> Licensed under the", "except in compliance with the License. You may obtain a", "KIND, either express or implied. See the License for the", "or implied. 
See the License for the specific language governing", "ec class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class representing a single experiment configuration", "SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class representing a single experiment configuration for linear", "to in writing, software distributed under the License is distributed", "model compute_estimations() Compute the estimated values for a give set", "The set of options specified by the user though command", "prefix) self.technique = ec.Technique.SVR self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'], kernel=self._hyperparameters['kernel'],", "signature associated with this experiment configuration \"\"\" signature = prefix.copy()", "\"\"\" signature = prefix.copy() signature.append(\"C_\" + str(self._hyperparameters['C'])) signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon']))", "+ str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" + str(self._hyperparameters['kernel'])) signature.append(\"degree_\" + str(self._hyperparameters['degree'])) return signature", "<gh_stars>1-10 \"\"\" Copyright 2019 <NAME> Copyright 2019 <NAME> Licensed under", "for %s is %f\", col_name, self._linear_regression.coef_[idx]) def compute_estimations(self, rows): \"\"\"", "or agreed to in writing, software distributed under the License", "performs the linear regression Methods ------- _train() Performs the actual", "to be solved \"\"\" super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix) self.technique =", "representing a single experiment configuration for linear regression Attributes ----------", "law or agreed to in writing, software distributed under the", "OR CONDITIONS OF ANY KIND, either express or implied. See", "the estimated values for a give set of data \"\"\"", "compliance with the License. 
You may obtain a copy of", "str(self._hyperparameters['C'])) signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" + str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" + str(self._hyperparameters['kernel']))", "\"\"\" Compute the estimations and the MAPE for runs in", "language governing permissions and limitations under the License. \"\"\" import", "OF ANY KIND, either express or implied. See the License", "the License. \"\"\" import sklearn.svm as svm import model_building.experiment_configuration as", "under the Apache License, Version 2.0 (the \"License\"); you may", "single experiment configuration for linear regression Attributes ---------- _linear_regression :", "= prefix.copy() signature.append(\"C_\" + str(self._hyperparameters['C'])) signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" +", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "for linear regression Attributes ---------- _linear_regression : LinearRegression The actual", "of options specified by the user though command line and", "import sklearn.svm as svm import model_building.experiment_configuration as ec class SVRExperimentConfiguration(ec.ExperimentConfiguration):", "problem to be solved \"\"\" super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix) self.technique", "Methods ------- _train() Performs the actual building of the linear", "assert self._regression_inputs xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata) self._logger.debug(\"Model built\")", "for idx, col_name in enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The coefficient for %s", "str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" + str(self._hyperparameters['kernel'])) signature.append(\"degree_\" + str(self._hyperparameters['degree'])) return signature 
def", "configuration files hyperparameters: dictionary The set of hyperparameters of this", "__init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix): \"\"\" campaign_configuration: dict of dict:", "the user though command line and campaign configuration files hyperparameters:", "self._signature) assert self._regression_inputs xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata, ydata) self._logger.debug(\"Model", "regression problem to be solved \"\"\" super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix)", "configuration \"\"\" signature = prefix.copy() signature.append(\"C_\" + str(self._hyperparameters['C'])) signature.append(\"epsilon_\" +", "License, Version 2.0 (the \"License\"); you may not use this", "by this object \"\"\" self._logger.debug(\"Building model for %s\", self._signature) assert", "for %s\", self._signature) assert self._regression_inputs xdata, ydata = self._regression_inputs.get_xy_data(self._regression_inputs.inputs_split[\"training\"]) self._regressor.fit(xdata,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "the experiment configuration represented by this object \"\"\" self._logger.debug(\"Building model", "for the specific language governing permissions and limitations under the", "str(self._hyperparameters['degree'])) return signature def _train(self): \"\"\" Build the model with", "See the License for the specific language governing permissions and", "self._logger.debug(\"The coefficient for %s is %f\", col_name, self._linear_regression.coef_[idx]) def compute_estimations(self,", "The actual scikt object which performs the linear regression Methods", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "compute_estimations(self, rows): \"\"\" Compute the estimations and the MAPE for", "of the regression problem to be solved \"\"\" 
super().__init__(campaign_configuration, hyperparameters,", "\"\"\" Build the model with the experiment configuration represented by", "experiment configuration \"\"\" signature = prefix.copy() signature.append(\"C_\" + str(self._hyperparameters['C'])) signature.append(\"epsilon_\"", "import model_building.experiment_configuration as ec class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class representing a", "permissions and limitations under the License. \"\"\" import sklearn.svm as", "------- _train() Performs the actual building of the linear model", "_train() Performs the actual building of the linear model compute_estimations()", "for a give set of data \"\"\" def __init__(self, campaign_configuration,", "this object \"\"\" self._logger.debug(\"Building model for %s\", self._signature) assert self._regression_inputs", "enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The coefficient for %s is %f\", col_name, self._linear_regression.coef_[idx])", "the estimations and the MAPE for runs in rows \"\"\"", "License. 
You may obtain a copy of the License at", "\"\"\" super().__init__(campaign_configuration, hyperparameters, regression_inputs, prefix) self.technique = ec.Technique.SVR self._regressor =", "as ec class SVRExperimentConfiguration(ec.ExperimentConfiguration): \"\"\" Class representing a single experiment", "col_name in enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The coefficient for %s is %f\",", "signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" + str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" + str(self._hyperparameters['kernel'])) signature.append(\"degree_\"", "dictionary The set of hyperparameters of this experiment configuration regression_inputs:", "\"\"\" campaign_configuration: dict of dict: The set of options specified", "the License for the specific language governing permissions and limitations", "the actual building of the linear model compute_estimations() Compute the", "may not use this file except in compliance with the", "this experiment configuration regression_inputs: RegressionInputs The input of the regression", "signature.append(\"C_\" + str(self._hyperparameters['C'])) signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" + str(self._hyperparameters['gamma'])) signature.append(\"kernel_\"", "signature.append(\"degree_\" + str(self._hyperparameters['degree'])) return signature def _train(self): \"\"\" Build the", "in writing, software distributed under the License is distributed on", "regression_inputs, prefix) self.technique = ec.Technique.SVR self._regressor = svm.SVR(C=self._hyperparameters['C'], epsilon=self._hyperparameters['epsilon'], gamma=self._hyperparameters['gamma'],", "self._regressor.fit(xdata, ydata) self._logger.debug(\"Model built\") # for idx, col_name in enumerate(self._regression_inputs.x_columns):", "required by applicable law or agreed to in writing, software", "self._logger.debug(\"Model 
built\") # for idx, col_name in enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The", "implied. See the License for the specific language governing permissions", "input of the regression problem to be solved \"\"\" super().__init__(campaign_configuration,", "built\") # for idx, col_name in enumerate(self._regression_inputs.x_columns): # self._logger.debug(\"The coefficient", "Build the model with the experiment configuration represented by this", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "by the user though command line and campaign configuration files", "associated with this experiment configuration \"\"\" signature = prefix.copy() signature.append(\"C_\"", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "2019 <NAME> Licensed under the Apache License, Version 2.0 (the", "actual building of the linear model compute_estimations() Compute the estimated", "which performs the linear regression Methods ------- _train() Performs the", "def compute_estimations(self, rows): \"\"\" Compute the estimations and the MAPE", "configuration represented by this object \"\"\" self._logger.debug(\"Building model for %s\",", "+ str(self._hyperparameters['C'])) signature.append(\"epsilon_\" + str(self._hyperparameters['epsilon'])) signature.append(\"gamma_\" + str(self._hyperparameters['gamma'])) signature.append(\"kernel_\" +", "<NAME> Licensed under the Apache License, Version 2.0 (the \"License\");", "signature.append(\"kernel_\" + str(self._hyperparameters['kernel'])) signature.append(\"degree_\" + str(self._hyperparameters['degree'])) return signature def _train(self):", "set of data \"\"\" def __init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix):", "of data \"\"\" def __init__(self, campaign_configuration, hyperparameters, regression_inputs, prefix): \"\"\"", "the linear regression Methods ------- _train() Performs the actual building", "The set of hyperparameters of this experiment 
configuration regression_inputs: RegressionInputs", "Copyright 2019 <NAME> Copyright 2019 <NAME> Licensed under the Apache", "prefix): \"\"\" Compute the signature associated with this experiment configuration" ]
[ "# ---------------------------------------------------------------------------------------------------------------- def __init__(self, pin): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- @property", "from scs_host.sys.host_gpio import HostGPIO # -------------------------------------------------------------------------------------------------------------------- # noinspection PyUnusedLocal,PyAbstractClass class", "---------------------------------------------------------------------------------------------------------------- def __init__(self, pin): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- @property def", "raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- def __str__(self, *args, **kwargs): raise NotImplementedError()", "<NAME> (<EMAIL>) \"\"\" from scs_host.sys.host_gpio import HostGPIO # -------------------------------------------------------------------------------------------------------------------- #", "@property def state(self): raise NotImplementedError() def wait(self, edge): raise NotImplementedError()", "wait(self, edge): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- def __str__(self, *args, **kwargs):", "PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO): \"\"\" classdocs \"\"\" # ---------------------------------------------------------------------------------------------------------------- def __init__(self,", "\"\"\" Created on 12 May 2017 @author: <NAME> (<EMAIL>) \"\"\"", "noinspection PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO): \"\"\" classdocs \"\"\" # 
---------------------------------------------------------------------------------------------------------------- def", "state(self): raise NotImplementedError() def wait(self, edge): raise NotImplementedError() # ----------------------------------------------------------------------------------------------------------------", "# noinspection PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO): \"\"\" classdocs \"\"\" # ----------------------------------------------------------------------------------------------------------------", "def __init__(self, pin): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- @property def state(self):", "<reponame>south-coast-science/scs_host_rpi<filename>src/scs_host/sys/host_gpi.py<gh_stars>0 \"\"\" Created on 12 May 2017 @author: <NAME> (<EMAIL>)", "pin): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- @property def state(self): raise NotImplementedError()", "May 2017 @author: <NAME> (<EMAIL>) \"\"\" from scs_host.sys.host_gpio import HostGPIO", "__init__(self, pin): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- @property def state(self): raise", "\"\"\" # ---------------------------------------------------------------------------------------------------------------- def __init__(self, pin): raise NotImplementedError() # ----------------------------------------------------------------------------------------------------------------", "2017 @author: <NAME> (<EMAIL>) \"\"\" from scs_host.sys.host_gpio import HostGPIO #", "raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- @property def state(self): raise NotImplementedError() def", "scs_host.sys.host_gpio 
import HostGPIO # -------------------------------------------------------------------------------------------------------------------- # noinspection PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO):", "class HostGPI(HostGPIO): \"\"\" classdocs \"\"\" # ---------------------------------------------------------------------------------------------------------------- def __init__(self, pin):", "---------------------------------------------------------------------------------------------------------------- @property def state(self): raise NotImplementedError() def wait(self, edge): raise", "edge): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- def __str__(self, *args, **kwargs): raise", "Created on 12 May 2017 @author: <NAME> (<EMAIL>) \"\"\" from", "12 May 2017 @author: <NAME> (<EMAIL>) \"\"\" from scs_host.sys.host_gpio import", "-------------------------------------------------------------------------------------------------------------------- # noinspection PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO): \"\"\" classdocs \"\"\" #", "NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- @property def state(self): raise NotImplementedError() def wait(self,", "classdocs \"\"\" # ---------------------------------------------------------------------------------------------------------------- def __init__(self, pin): raise NotImplementedError() #", "def wait(self, edge): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- def __str__(self, *args,", "# -------------------------------------------------------------------------------------------------------------------- # noinspection PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO): \"\"\" classdocs \"\"\"", "HostGPI(HostGPIO): \"\"\" 
classdocs \"\"\" # ---------------------------------------------------------------------------------------------------------------- def __init__(self, pin): raise", "raise NotImplementedError() def wait(self, edge): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- def", "# ---------------------------------------------------------------------------------------------------------------- @property def state(self): raise NotImplementedError() def wait(self, edge):", "(<EMAIL>) \"\"\" from scs_host.sys.host_gpio import HostGPIO # -------------------------------------------------------------------------------------------------------------------- # noinspection", "import HostGPIO # -------------------------------------------------------------------------------------------------------------------- # noinspection PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO): \"\"\"", "@author: <NAME> (<EMAIL>) \"\"\" from scs_host.sys.host_gpio import HostGPIO # --------------------------------------------------------------------------------------------------------------------", "\"\"\" from scs_host.sys.host_gpio import HostGPIO # -------------------------------------------------------------------------------------------------------------------- # noinspection PyUnusedLocal,PyAbstractClass", "def state(self): raise NotImplementedError() def wait(self, edge): raise NotImplementedError() #", "NotImplementedError() def wait(self, edge): raise NotImplementedError() # ---------------------------------------------------------------------------------------------------------------- def __str__(self,", "HostGPIO # -------------------------------------------------------------------------------------------------------------------- # noinspection PyUnusedLocal,PyAbstractClass class HostGPI(HostGPIO): \"\"\" classdocs", "on 12 May 2017 @author: <NAME> (<EMAIL>) \"\"\" from scs_host.sys.host_gpio", 
"\"\"\" classdocs \"\"\" # ---------------------------------------------------------------------------------------------------------------- def __init__(self, pin): raise NotImplementedError()" ]
[ "UnsubclassableType(type): def __new__(cls, name, bases, dct): c = super().__new__(cls, name,", "c = super().__new__(cls, name, bases, dct) setattr(c, '__init_subclass__', prevent_subclassing) return", "type') def prevent_subclassing(): raise TypeError('Unacceptable base type') def final_class(cls): setattr(cls,", "raise TypeError('Unacceptable base type') def final_class(cls): setattr(cls, '__init_subclass__', prevent_subclassing) return", "def final_class(cls): setattr(cls, '__init_subclass__', prevent_subclassing) return cls class UnsubclassableType(type): def", "prevent_subclassing) return cls class UnsubclassableType(type): def __new__(cls, name, bases, dct):", "return cls class UnsubclassableType(type): def __new__(cls, name, bases, dct): c", "final_class/final_class.py class Unsubclassable: def __init_subclass__(cls, **kwargs): raise TypeError('Unacceptable base type')", "def prevent_subclassing(): raise TypeError('Unacceptable base type') def final_class(cls): setattr(cls, '__init_subclass__',", "prevent_subclassing(): raise TypeError('Unacceptable base type') def final_class(cls): setattr(cls, '__init_subclass__', prevent_subclassing)", "**kwargs): raise TypeError('Unacceptable base type') def prevent_subclassing(): raise TypeError('Unacceptable base", "bases, dct): c = super().__new__(cls, name, bases, dct) setattr(c, '__init_subclass__',", "__new__(cls, name, bases, dct): c = super().__new__(cls, name, bases, dct)", "'__init_subclass__', prevent_subclassing) return cls class UnsubclassableType(type): def __new__(cls, name, bases,", "= super().__new__(cls, name, bases, dct) setattr(c, '__init_subclass__', prevent_subclassing) return c", "class Unsubclassable: def __init_subclass__(cls, **kwargs): raise TypeError('Unacceptable base type') def", "Unsubclassable: def __init_subclass__(cls, **kwargs): raise TypeError('Unacceptable base type') def prevent_subclassing():", "cls class UnsubclassableType(type): def __new__(cls, name, bases, dct): c =", "base 
type') def final_class(cls): setattr(cls, '__init_subclass__', prevent_subclassing) return cls class", "dct): c = super().__new__(cls, name, bases, dct) setattr(c, '__init_subclass__', prevent_subclassing)", "setattr(cls, '__init_subclass__', prevent_subclassing) return cls class UnsubclassableType(type): def __new__(cls, name,", "class UnsubclassableType(type): def __new__(cls, name, bases, dct): c = super().__new__(cls,", "type') def final_class(cls): setattr(cls, '__init_subclass__', prevent_subclassing) return cls class UnsubclassableType(type):", "__init_subclass__(cls, **kwargs): raise TypeError('Unacceptable base type') def prevent_subclassing(): raise TypeError('Unacceptable", "final_class(cls): setattr(cls, '__init_subclass__', prevent_subclassing) return cls class UnsubclassableType(type): def __new__(cls,", "name, bases, dct): c = super().__new__(cls, name, bases, dct) setattr(c,", "raise TypeError('Unacceptable base type') def prevent_subclassing(): raise TypeError('Unacceptable base type')", "def __init_subclass__(cls, **kwargs): raise TypeError('Unacceptable base type') def prevent_subclassing(): raise", "TypeError('Unacceptable base type') def prevent_subclassing(): raise TypeError('Unacceptable base type') def", "base type') def prevent_subclassing(): raise TypeError('Unacceptable base type') def final_class(cls):", "def __new__(cls, name, bases, dct): c = super().__new__(cls, name, bases,", "<filename>30-39/35. final_class/final_class.py class Unsubclassable: def __init_subclass__(cls, **kwargs): raise TypeError('Unacceptable base", "TypeError('Unacceptable base type') def final_class(cls): setattr(cls, '__init_subclass__', prevent_subclassing) return cls" ]
[ "L4 as ARCHI from ...my_argparser import TP_parse_args from collections import", "def tensor(self, data, requires_grad=False, dtype=None): return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype) def", "evaluate_summary_computer from utils.images import gather_images from visual.misc import plot_params from", "valid_generator, test_generator) print(results) return results if __name__ == '__main__': main()", "pd.concat(results, ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print(\"DONE !\") def", "self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params = (self.tes, self.jes, self.tes, self.mu) self.nuisance_params =", "test_generator = get_generators_torch(seed, cuda=args.cuda) train_generator = TrainGenerator(train_generator, cuda=args.cuda) valid_generator =", "= get_model(args, TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return model # =====================================================================", "self.data_generator.cuda() else: self.data_generator.cpu() self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes = self.tensor(Config.CALIBRATED.tes,", "= get_optimizer(args) model = get_model(args, TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return", "import train_or_load_neural_net from utils.evaluation import evaluate_summary_computer from utils.images import gather_images", "self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes =", "as Config from problem.higgs import get_generators_torch from problem.higgs import GeneratorCPU", "from problem.higgs import get_generators_torch from problem.higgs import GeneratorCPU from problem.higgs", "-1) os.makedirs(model.results_directory, exist_ok=True) # RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results =", "classifier') model = 
build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger) # TRAINING", "requires_grad=True) self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params", "run(args, i_cv): logger = logging.getLogger() print_line() logger.info('Running iter n°{}'.format(i_cv)) print_line()", "tensor(self, data, requires_grad=False, dtype=None): return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype) def build_model(args,", "self.mu) self.nuisance_params = OrderedDict([ ('tes', self.tes), ('jes', self.jes), ('les', self.les),", "set_logger() args = TP_parse_args(main_description=\"Training launcher for INFERNO on GG benchmark\")", "from utils.model import train_or_load_neural_net from utils.evaluation import evaluate_summary_computer from utils.images", "flush(logger) # TRAINING / LOADING train_or_load_neural_net(model, train_generator, retrain=args.retrain) # MEASUREMENT", "# ===================================================================== # MAIN # ===================================================================== def main(): # BASIC", "get_generators_torch(seed, cuda=args.cuda) train_generator = TrainGenerator(train_generator, cuda=args.cuda) valid_generator = GeneratorCPU(valid_generator) test_generator", "LOAD/GENERATE DATA logger.info('Set up data generator') config = Config() seed", "up classifier') model = build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger) #", "as NLLComputer from model.tangent_prop import TangentPropClassifier from archi.classic import L4", "===================================================================== def main(): # BASIC SETUP logger = set_logger() args", "import L4 as ARCHI from ...my_argparser import TP_parse_args from collections", "y, w def reset(self): self.data_generator.reset() def tensor(self, data, requires_grad=False, dtype=None):", "_TRUTH import numpy as np 
import pandas as pd from", "import pandas as pd from visual.misc import set_plot_config set_plot_config() from", "results = [run(args, i_cv) for i_cv in range(args.start_cv, args.end_cv)] results", "import _TRUTH import numpy as np import pandas as pd", "as np import pandas as pd from visual.misc import set_plot_config", "logger = set_logger() args = TP_parse_args(main_description=\"Training launcher for INFERNO on", "for i_cv in range(args.start_cv, args.end_cv)] results = pd.concat(results, ignore_index=True) #", "collections import OrderedDict from .common import measurement DATA_NAME = 'HIGGSTES'", "MODEL logger.info('Set up classifier') model = build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True)", "results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print(\"DONE !\") def run(args, i_cv): logger =", "# MAIN # ===================================================================== def main(): # BASIC SETUP logger", "* 5 train_generator, valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda) train_generator =", "logger.info('Set up classifier') model = build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger)", "HiggsNLL as NLLComputer from model.tangent_prop import TangentPropClassifier from archi.classic import", "logger.info(args) flush(logger) # INFO model = build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True)", "+ i_cv * 5 train_generator, valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda)", "def generate(self, n_samples=None): X, y, w = self.data_generator.diff_generate(*self.params, n_samples=n_samples) return", "SEED from config import _ERROR from config import _TRUTH import", "SET MODEL logger.info('Set up classifier') model = build_model(args, i_cv) os.makedirs(model.results_path,", "import print_line from utils.model import get_model from utils.model import get_optimizer", "dtype=None): return 
self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype) def build_model(args, i_cv): args.net =", "# SET MODEL logger.info('Set up classifier') model = build_model(args, i_cv)", "self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params = (self.tes, self.jes,", "# TRAINING / LOADING train_or_load_neural_net(model, train_generator, retrain=args.retrain) # MEASUREMENT results", "requires_grad=False, dtype=None): return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype) def build_model(args, i_cv): args.net", "def build_model(args, i_cv): args.net = ARCHI(n_in=29, n_out=2, n_unit=args.n_unit) args.optimizer =", "print_line() # LOAD/GENERATE DATA logger.info('Set up data generator') config =", "= 30 class TrainGenerator: def __init__(self, data_generator, cuda=False): self.data_generator =", "self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True)", "n_samples=None): X, y, w = self.data_generator.diff_generate(*self.params, n_samples=n_samples) return X, y,", "set_plot_config set_plot_config() from utils.log import set_logger from utils.log import flush", "Config from problem.higgs import get_generators_torch from problem.higgs import GeneratorCPU from", "self.data_generator.reset() def tensor(self, data, requires_grad=False, dtype=None): return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype)", "self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params = (self.tes, self.jes, self.tes, self.mu)", "get_generators_torch from problem.higgs import GeneratorCPU from problem.higgs import GeneratorTorch from", "i_cv in range(args.start_cv, args.end_cv)] results = pd.concat(results, ignore_index=True) # EVALUATION", "import get_model from utils.model import get_optimizer from utils.model 
import train_or_load_neural_net", "!\") def run(args, i_cv): logger = logging.getLogger() print_line() logger.info('Running iter", "model = get_model(args, TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return model #", "__future__ import unicode_literals # Command line : # python -m", "from utils.log import print_line from utils.model import get_model from utils.model", "args.optimizer = get_optimizer(args) model = get_model(args, TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv)", "train_or_load_neural_net from utils.evaluation import evaluate_summary_computer from utils.images import gather_images from", "__future__ import division from __future__ import absolute_import from __future__ import", "results = measurement(model, i_cv, config, valid_generator, test_generator) print(results) return results", "import logging from config import SEED from config import _ERROR", "self.data_generator.diff_generate(*self.params, n_samples=n_samples) return X, y, w def reset(self): self.data_generator.reset() def", "model.tangent_prop import TangentPropClassifier from archi.classic import L4 as ARCHI from", "# Command line : # python -m benchmark.VAR.GG.TP import os", "exist_ok=True) # RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results = [run(args, i_cv)", "TP_parse_args(main_description=\"Training launcher for INFERNO on GG benchmark\") logger.info(args) flush(logger) #", "from utils.log import set_logger from utils.log import flush from utils.log", "TrainGenerator(train_generator, cuda=args.cuda) valid_generator = GeneratorCPU(valid_generator) test_generator = GeneratorCPU(test_generator) # SET", "os.makedirs(model.results_path, exist_ok=True) flush(logger) # TRAINING / LOADING train_or_load_neural_net(model, train_generator, retrain=args.retrain)", "up data generator') config = Config() seed = SEED +", "requires_grad=requires_grad, dtype=dtype) def build_model(args, i_cv): args.net = ARCHI(n_in=29, n_out=2, 
n_unit=args.n_unit)", "i_cv * 5 train_generator, valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda) train_generator", "= measurement(model, i_cv, config, valid_generator, test_generator) print(results) return results if", "def main(): # BASIC SETUP logger = set_logger() args =", "i_cv) return model # ===================================================================== # MAIN # ===================================================================== def", "# BASIC SETUP logger = set_logger() args = TP_parse_args(main_description=\"Training launcher", "# RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results = [run(args, i_cv) for", "config import SEED from config import _ERROR from config import", "utils.model import get_model from utils.model import get_optimizer from utils.model import", "iter n°{}'.format(i_cv)) print_line() # LOAD/GENERATE DATA logger.info('Set up data generator')", "HiggsConfigTesOnly as Config from problem.higgs import get_generators_torch from problem.higgs import", "import set_logger from utils.log import flush from utils.log import print_line", "absolute_import from __future__ import unicode_literals # Command line : #", "from __future__ import print_function from __future__ import division from __future__", "Command line : # python -m benchmark.VAR.GG.TP import os import", "utf-8 from __future__ import print_function from __future__ import division from", "('les', self.les), ]) def generate(self, n_samples=None): X, y, w =", "measurement DATA_NAME = 'HIGGSTES' BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER = 30", "problem.higgs import HiggsConfigTesOnly as Config from problem.higgs import get_generators_torch from", "i_cv) for i_cv in range(args.start_cv, args.end_cv)] results = pd.concat(results, ignore_index=True)", "print_function from __future__ import division from __future__ import absolute_import from", "= 'VAR-'+DATA_NAME N_ITER = 30 class TrainGenerator: def __init__(self, data_generator,", 
"return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype) def build_model(args, i_cv): args.net = ARCHI(n_in=29,", "print(results) print(\"DONE !\") def run(args, i_cv): logger = logging.getLogger() print_line()", "train_generator = TrainGenerator(train_generator, cuda=args.cuda) valid_generator = GeneratorCPU(valid_generator) test_generator = GeneratorCPU(test_generator)", "cuda=False): self.data_generator = data_generator if cuda: self.data_generator.cuda() else: self.data_generator.cpu() self.mu", "from model.tangent_prop import TangentPropClassifier from archi.classic import L4 as ARCHI", "from ...my_argparser import TP_parse_args from collections import OrderedDict from .common", "from visual.misc import plot_params from problem.higgs import HiggsConfigTesOnly as Config", "range(args.start_cv, args.end_cv)] results = pd.concat(results, ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv'))", "flush(logger) # INFO model = build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True) #", "N_ITER = 30 class TrainGenerator: def __init__(self, data_generator, cuda=False): self.data_generator", "import flush from utils.log import print_line from utils.model import get_model", "('jes', self.jes), ('les', self.les), ]) def generate(self, n_samples=None): X, y,", "utils.model import get_optimizer from utils.model import train_or_load_neural_net from utils.evaluation import", "OrderedDict([ ('tes', self.tes), ('jes', self.jes), ('les', self.les), ]) def generate(self,", "from visual.misc import set_plot_config set_plot_config() from utils.log import set_logger from", "[{args.start_cv},{args.end_cv}[') results = [run(args, i_cv) for i_cv in range(args.start_cv, args.end_cv)]", "args.net = ARCHI(n_in=29, n_out=2, n_unit=args.n_unit) args.optimizer = get_optimizer(args) model =", "import TangentPropClassifier from archi.classic import L4 as ARCHI from ...my_argparser", "generator') config 
= Config() seed = SEED + i_cv *", "SETUP logger = set_logger() args = TP_parse_args(main_description=\"Training launcher for INFERNO", "as pd from visual.misc import set_plot_config set_plot_config() from utils.log import", "import HiggsConfigTesOnly as Config from problem.higgs import get_generators_torch from problem.higgs", "if cuda: self.data_generator.cuda() else: self.data_generator.cpu() self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes", "utils.log import print_line from utils.model import get_model from utils.model import", "from .common import measurement DATA_NAME = 'HIGGSTES' BENCHMARK_NAME = 'VAR-'+DATA_NAME", "problem.higgs import GeneratorCPU from problem.higgs import GeneratorTorch from problem.higgs import", "# EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print(\"DONE !\") def run(args, i_cv):", "gather_images from visual.misc import plot_params from problem.higgs import HiggsConfigTesOnly as", "data_generator if cuda: self.data_generator.cuda() else: self.data_generator.cpu() self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True)", "requires_grad=True) self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les", "self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype) def build_model(args, i_cv): args.net = ARCHI(n_in=29, n_out=2,", "= [run(args, i_cv) for i_cv in range(args.start_cv, args.end_cv)] results =", "ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print(\"DONE !\") def run(args,", "BENCHMARK_NAME, i_cv) return model # ===================================================================== # MAIN # =====================================================================", "#!/usr/bin/env python # coding: utf-8 from __future__ import print_function from", "= 
TP_parse_args(main_description=\"Training launcher for INFERNO on GG benchmark\") logger.info(args) flush(logger)", "config, valid_generator, test_generator) print(results) return results if __name__ == '__main__':", "__future__ import print_function from __future__ import division from __future__ import", "TP_parse_args from collections import OrderedDict from .common import measurement DATA_NAME", "self.jes), ('les', self.les), ]) def generate(self, n_samples=None): X, y, w", "= SEED + i_cv * 5 train_generator, valid_generator, test_generator =", "results = pd.concat(results, ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print(\"DONE", "# ===================================================================== def main(): # BASIC SETUP logger = set_logger()", "benchmark\") logger.info(args) flush(logger) # INFO model = build_model(args, -1) os.makedirs(model.results_directory,", "runs [{args.start_cv},{args.end_cv}[') results = [run(args, i_cv) for i_cv in range(args.start_cv,", "config import _ERROR from config import _TRUTH import numpy as", "BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER = 30 class TrainGenerator: def __init__(self,", "flush from utils.log import print_line from utils.model import get_model from", "from __future__ import division from __future__ import absolute_import from __future__", "= GeneratorCPU(test_generator) # SET MODEL logger.info('Set up classifier') model =", "self.data_generator = data_generator if cuda: self.data_generator.cuda() else: self.data_generator.cpu() self.mu =", "unicode_literals # Command line : # python -m benchmark.VAR.GG.TP import", "import GeneratorCPU from problem.higgs import GeneratorTorch from problem.higgs import HiggsNLL", "= get_generators_torch(seed, cuda=args.cuda) train_generator = TrainGenerator(train_generator, cuda=args.cuda) valid_generator = GeneratorCPU(valid_generator)", "TRAINING / LOADING train_or_load_neural_net(model, 
train_generator, retrain=args.retrain) # MEASUREMENT results =", "utils.log import flush from utils.log import print_line from utils.model import", "set_logger from utils.log import flush from utils.log import print_line from", "train_generator, retrain=args.retrain) # MEASUREMENT results = measurement(model, i_cv, config, valid_generator,", "from collections import OrderedDict from .common import measurement DATA_NAME =", "= self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les = self.tensor(Config.CALIBRATED.les,", "= self.data_generator.diff_generate(*self.params, n_samples=n_samples) return X, y, w def reset(self): self.data_generator.reset()", "OrderedDict from .common import measurement DATA_NAME = 'HIGGSTES' BENCHMARK_NAME =", "i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger) # TRAINING / LOADING train_or_load_neural_net(model, train_generator,", "30 class TrainGenerator: def __init__(self, data_generator, cuda=False): self.data_generator = data_generator", "cuda=args.cuda) train_generator = TrainGenerator(train_generator, cuda=args.cuda) valid_generator = GeneratorCPU(valid_generator) test_generator =", "return X, y, w def reset(self): self.data_generator.reset() def tensor(self, data,", "args = TP_parse_args(main_description=\"Training launcher for INFERNO on GG benchmark\") logger.info(args)", "_ERROR from config import _TRUTH import numpy as np import", "]) def generate(self, n_samples=None): X, y, w = self.data_generator.diff_generate(*self.params, n_samples=n_samples)", "numpy as np import pandas as pd from visual.misc import", "self.nuisance_params = OrderedDict([ ('tes', self.tes), ('jes', self.jes), ('les', self.les), ])", "plot_params from problem.higgs import HiggsConfigTesOnly as Config from problem.higgs import", "X, y, w def reset(self): self.data_generator.reset() def tensor(self, data, requires_grad=False,", "EVALUATION 
results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print(\"DONE !\") def run(args, i_cv): logger", "= GeneratorCPU(valid_generator) test_generator = GeneratorCPU(test_generator) # SET MODEL logger.info('Set up", "from utils.model import get_optimizer from utils.model import train_or_load_neural_net from utils.evaluation", "MEASUREMENT results = measurement(model, i_cv, config, valid_generator, test_generator) print(results) return", "GeneratorCPU(test_generator) # SET MODEL logger.info('Set up classifier') model = build_model(args,", "__future__ import absolute_import from __future__ import unicode_literals # Command line", "SEED + i_cv * 5 train_generator, valid_generator, test_generator = get_generators_torch(seed,", "i_cv): args.net = ARCHI(n_in=29, n_out=2, n_unit=args.n_unit) args.optimizer = get_optimizer(args) model", "test_generator = GeneratorCPU(test_generator) # SET MODEL logger.info('Set up classifier') model", "on GG benchmark\") logger.info(args) flush(logger) # INFO model = build_model(args,", "from config import _ERROR from config import _TRUTH import numpy", "utils.evaluation import evaluate_summary_computer from utils.images import gather_images from visual.misc import", "retrain=args.retrain) # MEASUREMENT results = measurement(model, i_cv, config, valid_generator, test_generator)", "model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return model # ===================================================================== # MAIN #", "import GeneratorTorch from problem.higgs import HiggsNLL as NLLComputer from model.tangent_prop", "n_samples=n_samples) return X, y, w def reset(self): self.data_generator.reset() def tensor(self,", "set_plot_config() from utils.log import set_logger from utils.log import flush from", "import print_function from __future__ import division from __future__ import absolute_import", "w def reset(self): self.data_generator.reset() def tensor(self, data, requires_grad=False, dtype=None): return", 
"INFERNO on GG benchmark\") logger.info(args) flush(logger) # INFO model =", "reset(self): self.data_generator.reset() def tensor(self, data, requires_grad=False, dtype=None): return self.data_generator.tensor(data, requires_grad=requires_grad,", "...my_argparser import TP_parse_args from collections import OrderedDict from .common import", "i_cv): logger = logging.getLogger() print_line() logger.info('Running iter n°{}'.format(i_cv)) print_line() #", "Config() seed = SEED + i_cv * 5 train_generator, valid_generator,", "from problem.higgs import GeneratorTorch from problem.higgs import HiggsNLL as NLLComputer", "-m benchmark.VAR.GG.TP import os import logging from config import SEED", "line : # python -m benchmark.VAR.GG.TP import os import logging", "from __future__ import unicode_literals # Command line : # python", "cuda=args.cuda) valid_generator = GeneratorCPU(valid_generator) test_generator = GeneratorCPU(test_generator) # SET MODEL", "visual.misc import set_plot_config set_plot_config() from utils.log import set_logger from utils.log", "ARCHI(n_in=29, n_out=2, n_unit=args.n_unit) args.optimizer = get_optimizer(args) model = get_model(args, TangentPropClassifier)", "from problem.higgs import GeneratorCPU from problem.higgs import GeneratorTorch from problem.higgs", "visual.misc import plot_params from problem.higgs import HiggsConfigTesOnly as Config from", "= TrainGenerator(train_generator, cuda=args.cuda) valid_generator = GeneratorCPU(valid_generator) test_generator = GeneratorCPU(test_generator) #", "import os import logging from config import SEED from config", "os.makedirs(model.results_directory, exist_ok=True) # RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results = [run(args,", "ARCHI from ...my_argparser import TP_parse_args from collections import OrderedDict from", "exist_ok=True) flush(logger) # TRAINING / LOADING train_or_load_neural_net(model, train_generator, retrain=args.retrain) #", "# coding: utf-8 from __future__ import 
print_function from __future__ import", "self.data_generator.cpu() self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes", "config import _TRUTH import numpy as np import pandas as", "n°{}'.format(i_cv)) print_line() # LOAD/GENERATE DATA logger.info('Set up data generator') config", "self.jes, self.tes, self.mu) self.nuisance_params = OrderedDict([ ('tes', self.tes), ('jes', self.jes),", "generate(self, n_samples=None): X, y, w = self.data_generator.diff_generate(*self.params, n_samples=n_samples) return X,", "= build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True) # RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[')", "= set_logger() args = TP_parse_args(main_description=\"Training launcher for INFERNO on GG", "i_cv, config, valid_generator, test_generator) print(results) return results if __name__ ==", "model = build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger) # TRAINING /", "= self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params = (self.tes, self.jes, self.tes, self.mu) self.nuisance_params", "logger.info('Set up data generator') config = Config() seed = SEED", "import get_optimizer from utils.model import train_or_load_neural_net from utils.evaluation import evaluate_summary_computer", "logging.getLogger() print_line() logger.info('Running iter n°{}'.format(i_cv)) print_line() # LOAD/GENERATE DATA logger.info('Set", "import evaluate_summary_computer from utils.images import gather_images from visual.misc import plot_params", "'VAR-'+DATA_NAME N_ITER = 30 class TrainGenerator: def __init__(self, data_generator, cuda=False):", "<gh_stars>1-10 #!/usr/bin/env python # coding: utf-8 from __future__ import print_function", "dtype=dtype) def build_model(args, i_cv): args.net = ARCHI(n_in=29, n_out=2, n_unit=args.n_unit) args.optimizer", "benchmark.VAR.GG.TP import os import logging from config import 
SEED from", "python # coding: utf-8 from __future__ import print_function from __future__", "get_optimizer from utils.model import train_or_load_neural_net from utils.evaluation import evaluate_summary_computer from", "# INFO model = build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True) # RUN", "RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results = [run(args, i_cv) for i_cv", "pd from visual.misc import set_plot_config set_plot_config() from utils.log import set_logger", "= OrderedDict([ ('tes', self.tes), ('jes', self.jes), ('les', self.les), ]) def", "import _ERROR from config import _TRUTH import numpy as np", "from utils.log import flush from utils.log import print_line from utils.model", "= self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes = self.tensor(Config.CALIBRATED.jes,", "from config import SEED from config import _ERROR from config", "GG benchmark\") logger.info(args) flush(logger) # INFO model = build_model(args, -1)", "model # ===================================================================== # MAIN # ===================================================================== def main(): #", "= (self.tes, self.jes, self.tes, self.mu) self.nuisance_params = OrderedDict([ ('tes', self.tes),", "requires_grad=True) self.params = (self.tes, self.jes, self.tes, self.mu) self.nuisance_params = OrderedDict([", "valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda) train_generator = TrainGenerator(train_generator, cuda=args.cuda) valid_generator", "self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True)", "MAIN # ===================================================================== def main(): # BASIC SETUP logger =", "/ LOADING train_or_load_neural_net(model, train_generator, 
retrain=args.retrain) # MEASUREMENT results = measurement(model,", "import absolute_import from __future__ import unicode_literals # Command line :", "print_line from utils.model import get_model from utils.model import get_optimizer from", "coding: utf-8 from __future__ import print_function from __future__ import division", "from problem.higgs import HiggsNLL as NLLComputer from model.tangent_prop import TangentPropClassifier", "problem.higgs import HiggsNLL as NLLComputer from model.tangent_prop import TangentPropClassifier from", "DATA_NAME = 'HIGGSTES' BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER = 30 class", "BASIC SETUP logger = set_logger() args = TP_parse_args(main_description=\"Training launcher for", "self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params =", "problem.higgs import get_generators_torch from problem.higgs import GeneratorCPU from problem.higgs import", "launcher for INFERNO on GG benchmark\") logger.info(args) flush(logger) # INFO", "5 train_generator, valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda) train_generator = TrainGenerator(train_generator,", "from utils.model import get_model from utils.model import get_optimizer from utils.model", "division from __future__ import absolute_import from __future__ import unicode_literals #", "data_generator, cuda=False): self.data_generator = data_generator if cuda: self.data_generator.cuda() else: self.data_generator.cpu()", "print_line() logger.info('Running iter n°{}'.format(i_cv)) print_line() # LOAD/GENERATE DATA logger.info('Set up", "LOADING train_or_load_neural_net(model, train_generator, retrain=args.retrain) # MEASUREMENT results = measurement(model, i_cv,", "problem.higgs import GeneratorTorch from problem.higgs import HiggsNLL as NLLComputer from", "self.tes), ('jes', self.jes), ('les', self.les), ]) def generate(self, n_samples=None): X,", "import HiggsNLL as NLLComputer 
from model.tangent_prop import TangentPropClassifier from archi.classic", "= ARCHI(n_in=29, n_out=2, n_unit=args.n_unit) args.optimizer = get_optimizer(args) model = get_model(args,", "import plot_params from problem.higgs import HiggsConfigTesOnly as Config from problem.higgs", "utils.model import train_or_load_neural_net from utils.evaluation import evaluate_summary_computer from utils.images import", "measurement(model, i_cv, config, valid_generator, test_generator) print(results) return results if __name__", "import SEED from config import _ERROR from config import _TRUTH", "as ARCHI from ...my_argparser import TP_parse_args from collections import OrderedDict", "seed = SEED + i_cv * 5 train_generator, valid_generator, test_generator", "= data_generator if cuda: self.data_generator.cuda() else: self.data_generator.cpu() self.mu = self.tensor(Config.CALIBRATED.mu,", "= pd.concat(results, ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results) print(\"DONE !\")", "import TP_parse_args from collections import OrderedDict from .common import measurement", "main(): # BASIC SETUP logger = set_logger() args = TP_parse_args(main_description=\"Training", "config = Config() seed = SEED + i_cv * 5", "import measurement DATA_NAME = 'HIGGSTES' BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER =", "===================================================================== # MAIN # ===================================================================== def main(): # BASIC SETUP", "cuda: self.data_generator.cuda() else: self.data_generator.cpu() self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes =", "# MEASUREMENT results = measurement(model, i_cv, config, valid_generator, test_generator) print(results)", "else: self.data_generator.cpu() self.mu = self.tensor(Config.CALIBRATED.mu, requires_grad=True) self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True)", "(self.tes, self.jes, self.tes, self.mu) 
self.nuisance_params = OrderedDict([ ('tes', self.tes), ('jes',", "logging from config import SEED from config import _ERROR from", "pandas as pd from visual.misc import set_plot_config set_plot_config() from utils.log", "class TrainGenerator: def __init__(self, data_generator, cuda=False): self.data_generator = data_generator if", "in range(args.start_cv, args.end_cv)] results = pd.concat(results, ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory,", "get_model from utils.model import get_optimizer from utils.model import train_or_load_neural_net from", "INFO model = build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True) # RUN logger.info(f'Running", "= build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger) # TRAINING / LOADING", "utils.log import set_logger from utils.log import flush from utils.log import", "def reset(self): self.data_generator.reset() def tensor(self, data, requires_grad=False, dtype=None): return self.data_generator.tensor(data,", "import division from __future__ import absolute_import from __future__ import unicode_literals", "self.les), ]) def generate(self, n_samples=None): X, y, w = self.data_generator.diff_generate(*self.params,", "import numpy as np import pandas as pd from visual.misc", "data, requires_grad=False, dtype=None): return self.data_generator.tensor(data, requires_grad=requires_grad, dtype=dtype) def build_model(args, i_cv):", "y, w = self.data_generator.diff_generate(*self.params, n_samples=n_samples) return X, y, w def", "from __future__ import absolute_import from __future__ import unicode_literals # Command", "# LOAD/GENERATE DATA logger.info('Set up data generator') config = Config()", "def __init__(self, data_generator, cuda=False): self.data_generator = data_generator if cuda: self.data_generator.cuda()", "# python -m benchmark.VAR.GG.TP import os import logging from config", "X, y, w = self.data_generator.diff_generate(*self.params, 
n_samples=n_samples) return X, y, w", "logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results = [run(args, i_cv) for i_cv in", "python -m benchmark.VAR.GG.TP import os import logging from config import", "self.tes, self.mu) self.nuisance_params = OrderedDict([ ('tes', self.tes), ('jes', self.jes), ('les',", "GeneratorTorch from problem.higgs import HiggsNLL as NLLComputer from model.tangent_prop import", "for INFERNO on GG benchmark\") logger.info(args) flush(logger) # INFO model", "get_optimizer(args) model = get_model(args, TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return model", "utils.images import gather_images from visual.misc import plot_params from problem.higgs import", "= self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params = (self.tes,", "os import logging from config import SEED from config import", "print(\"DONE !\") def run(args, i_cv): logger = logging.getLogger() print_line() logger.info('Running", "n_out=2, n_unit=args.n_unit) args.optimizer = get_optimizer(args) model = get_model(args, TangentPropClassifier) model.set_info(DATA_NAME,", "w = self.data_generator.diff_generate(*self.params, n_samples=n_samples) return X, y, w def reset(self):", "build_model(args, i_cv): args.net = ARCHI(n_in=29, n_out=2, n_unit=args.n_unit) args.optimizer = get_optimizer(args)", "= 'HIGGSTES' BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER = 30 class TrainGenerator:", "n_unit=args.n_unit) args.optimizer = get_optimizer(args) model = get_model(args, TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME,", "args.end_cv)] results = pd.concat(results, ignore_index=True) # EVALUATION results.to_csv(os.path.join(model.results_directory, 'threshold.csv')) print(results)", "TangentPropClassifier from archi.classic import L4 as ARCHI from ...my_argparser import", "model = build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True) # RUN 
logger.info(f'Running runs", "logger = logging.getLogger() print_line() logger.info('Running iter n°{}'.format(i_cv)) print_line() # LOAD/GENERATE", "np import pandas as pd from visual.misc import set_plot_config set_plot_config()", "TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return model # ===================================================================== # MAIN", "get_model(args, TangentPropClassifier) model.set_info(DATA_NAME, BENCHMARK_NAME, i_cv) return model # ===================================================================== #", "def run(args, i_cv): logger = logging.getLogger() print_line() logger.info('Running iter n°{}'.format(i_cv))", "'threshold.csv')) print(results) print(\"DONE !\") def run(args, i_cv): logger = logging.getLogger()", "train_or_load_neural_net(model, train_generator, retrain=args.retrain) # MEASUREMENT results = measurement(model, i_cv, config,", "valid_generator = GeneratorCPU(valid_generator) test_generator = GeneratorCPU(test_generator) # SET MODEL logger.info('Set", "data generator') config = Config() seed = SEED + i_cv", "self.params = (self.tes, self.jes, self.tes, self.mu) self.nuisance_params = OrderedDict([ ('tes',", "build_model(args, -1) os.makedirs(model.results_directory, exist_ok=True) # RUN logger.info(f'Running runs [{args.start_cv},{args.end_cv}[') results", "TrainGenerator: def __init__(self, data_generator, cuda=False): self.data_generator = data_generator if cuda:", "DATA logger.info('Set up data generator') config = Config() seed =", "build_model(args, i_cv) os.makedirs(model.results_path, exist_ok=True) flush(logger) # TRAINING / LOADING train_or_load_neural_net(model,", "= logging.getLogger() print_line() logger.info('Running iter n°{}'.format(i_cv)) print_line() # LOAD/GENERATE DATA", "'HIGGSTES' BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER = 30 class TrainGenerator: def", "import get_generators_torch from problem.higgs import GeneratorCPU from problem.higgs import GeneratorTorch", 
"return model # ===================================================================== # MAIN # ===================================================================== def main():", "from config import _TRUTH import numpy as np import pandas", "from utils.images import gather_images from visual.misc import plot_params from problem.higgs", "from problem.higgs import HiggsConfigTesOnly as Config from problem.higgs import get_generators_torch", "('tes', self.tes), ('jes', self.jes), ('les', self.les), ]) def generate(self, n_samples=None):", ".common import measurement DATA_NAME = 'HIGGSTES' BENCHMARK_NAME = 'VAR-'+DATA_NAME N_ITER", "train_generator, valid_generator, test_generator = get_generators_torch(seed, cuda=args.cuda) train_generator = TrainGenerator(train_generator, cuda=args.cuda)", ": # python -m benchmark.VAR.GG.TP import os import logging from", "= Config() seed = SEED + i_cv * 5 train_generator,", "archi.classic import L4 as ARCHI from ...my_argparser import TP_parse_args from", "from archi.classic import L4 as ARCHI from ...my_argparser import TP_parse_args", "requires_grad=True) self.les = self.tensor(Config.CALIBRATED.les, requires_grad=True) self.params = (self.tes, self.jes, self.tes,", "logger.info('Running iter n°{}'.format(i_cv)) print_line() # LOAD/GENERATE DATA logger.info('Set up data", "import OrderedDict from .common import measurement DATA_NAME = 'HIGGSTES' BENCHMARK_NAME", "import gather_images from visual.misc import plot_params from problem.higgs import HiggsConfigTesOnly", "[run(args, i_cv) for i_cv in range(args.start_cv, args.end_cv)] results = pd.concat(results,", "from utils.evaluation import evaluate_summary_computer from utils.images import gather_images from visual.misc", "import unicode_literals # Command line : # python -m benchmark.VAR.GG.TP", "NLLComputer from model.tangent_prop import TangentPropClassifier from archi.classic import L4 as", "import set_plot_config set_plot_config() from utils.log import set_logger from 
utils.log import", "self.tes = self.tensor(Config.CALIBRATED.tes, requires_grad=True) self.jes = self.tensor(Config.CALIBRATED.jes, requires_grad=True) self.les =", "__init__(self, data_generator, cuda=False): self.data_generator = data_generator if cuda: self.data_generator.cuda() else:", "GeneratorCPU(valid_generator) test_generator = GeneratorCPU(test_generator) # SET MODEL logger.info('Set up classifier')", "GeneratorCPU from problem.higgs import GeneratorTorch from problem.higgs import HiggsNLL as" ]
[ "MagicMock else: from mock import Mock, MagicMock class ADLTest(unittest.TestCase): \"\"\"", "\"baz\"], ) self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\", \"c\"]", "from unittest.mock import Mock, MagicMock else: from mock import Mock,", "from mock import Mock, MagicMock class ADLTest(unittest.TestCase): \"\"\" Tests for", "self.open = Mock(return_value=self.fakeFile) self.fakeAdapter = Mock(open=self.open, ls=self.ls) self.adl = ADL()", "self.fakeFile.__enter__.return_value = self.fakeFile self.open = Mock(return_value=self.fakeFile) self.fakeAdapter = Mock(open=self.open, ls=self.ls)", "b\"b\", b\"c\"] self.fakeFile.__enter__.return_value = self.fakeFile self.open = Mock(return_value=self.fakeFile) self.fakeAdapter =", "self.fakeAdapter = Mock(open=self.open, ls=self.ls) self.adl = ADL() self.adl._create_adapter = Mock(return_value=self.fakeAdapter)", "Mock(return_value=self.fakeFile) self.fakeAdapter = Mock(open=self.open, ls=self.ls) self.adl = ADL() self.adl._create_adapter =", "self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\", \"c\"] ) self.fakeFile.__iter__.assert_called_once_with() def test_write_opens_file_and_writes_to_it(self): self.adl.write(\"hello", "\"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\", \"baz\"], ) self.ls.assert_called_once_with(\"path/to/file\")", "Tests for `ADL` \"\"\" def setUp(self): self.ls = Mock(return_value=[\"foo\", \"bar\",", "= Mock(open=self.open, ls=self.ls) self.adl = ADL() self.adl._create_adapter = Mock(return_value=self.fakeAdapter) def", "\"\"\" Tests for `ADL` \"\"\" def setUp(self): self.ls = Mock(return_value=[\"foo\",", "ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid 
ADL url 'this_is_not_a_valid_url'\" in str(context.exception)) def test_split_url_splits_valid_url(self): (store_name,", "as context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid ADL url 'this_is_not_a_valid_url'\" in str(context.exception)) def", "if six.PY3: from unittest.mock import Mock, MagicMock else: from mock", "unittest.mock import Mock, MagicMock else: from mock import Mock, MagicMock", "self.adl._create_adapter = Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception) as context: ADL._split_url(\"this_is_not_a_valid_url\")", "def setUp(self): self.ls = Mock(return_value=[\"foo\", \"bar\", \"baz\"]) self.fakeFile = MagicMock()", "= ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\") self.assertEqual(path, \"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"),", "test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\", \"c\"] ) self.fakeFile.__iter__.assert_called_once_with() def test_write_opens_file_and_writes_to_it(self):", "with self.assertRaises(Exception) as context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid ADL url 'this_is_not_a_valid_url'\" in", "def test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception) as context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid ADL url", "Mock, MagicMock class ADLTest(unittest.TestCase): \"\"\" Tests for `ADL` \"\"\" def", "= ADL() self.adl._create_adapter = Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception) as", "self.fakeFile.__iter__.return_value = [b\"a\", b\"b\", b\"c\"] self.fakeFile.__enter__.return_value = self.fakeFile 
self.open =", "b\"c\"] self.fakeFile.__enter__.return_value = self.fakeFile self.open = Mock(return_value=self.fakeFile) self.fakeAdapter = Mock(open=self.open,", "'this_is_not_a_valid_url'\" in str(context.exception)) def test_split_url_splits_valid_url(self): (store_name, path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name,", "path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\") self.assertEqual(path, \"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual(", "from ..adl import ADL import six if six.PY3: from unittest.mock", "= self.fakeFile self.open = Mock(return_value=self.fakeFile) self.fakeAdapter = Mock(open=self.open, ls=self.ls) self.adl", "= MagicMock() self.fakeFile.__iter__.return_value = [b\"a\", b\"b\", b\"c\"] self.fakeFile.__enter__.return_value = self.fakeFile", "ls=self.ls) self.adl = ADL() self.adl._create_adapter = Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self): with", "import unittest from ..adl import ADL import six if six.PY3:", "mock import Mock, MagicMock class ADLTest(unittest.TestCase): \"\"\" Tests for `ADL`", "`ADL` \"\"\" def setUp(self): self.ls = Mock(return_value=[\"foo\", \"bar\", \"baz\"]) self.fakeFile", "Mock(open=self.open, ls=self.ls) self.adl = ADL() self.adl._create_adapter = Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self):", "self.assertTrue(\"Invalid ADL url 'this_is_not_a_valid_url'\" in str(context.exception)) def test_split_url_splits_valid_url(self): (store_name, path)", "setUp(self): self.ls = Mock(return_value=[\"foo\", \"bar\", \"baz\"]) self.fakeFile = MagicMock() self.fakeFile.__iter__.return_value", "six if six.PY3: from unittest.mock import Mock, MagicMock else: from", "context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid ADL url 'this_is_not_a_valid_url'\" in 
str(context.exception)) def test_split_url_splits_valid_url(self):", "self.ls = Mock(return_value=[\"foo\", \"bar\", \"baz\"]) self.fakeFile = MagicMock() self.fakeFile.__iter__.return_value =", "ADL() self.adl._create_adapter = Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception) as context:", "self.assertRaises(Exception) as context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid ADL url 'this_is_not_a_valid_url'\" in str(context.exception))", "import Mock, MagicMock class ADLTest(unittest.TestCase): \"\"\" Tests for `ADL` \"\"\"", "in str(context.exception)) def test_split_url_splits_valid_url(self): (store_name, path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\")", "\"bar\", \"baz\"]) self.fakeFile = MagicMock() self.fakeFile.__iter__.return_value = [b\"a\", b\"b\", b\"c\"]", "ADLTest(unittest.TestCase): \"\"\" Tests for `ADL` \"\"\" def setUp(self): self.ls =", "str(context.exception)) def test_split_url_splits_valid_url(self): (store_name, path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\") self.assertEqual(path,", "ADL import six if six.PY3: from unittest.mock import Mock, MagicMock", "[\"foo\", \"bar\", \"baz\"], ) self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\",", "ADL url 'this_is_not_a_valid_url'\" in str(context.exception)) def test_split_url_splits_valid_url(self): (store_name, path) =", "= Mock(return_value=[\"foo\", \"bar\", \"baz\"]) self.fakeFile = MagicMock() self.fakeFile.__iter__.return_value = [b\"a\",", "[\"a\", \"b\", \"c\"] ) self.fakeFile.__iter__.assert_called_once_with() def test_write_opens_file_and_writes_to_it(self): self.adl.write(\"hello world\", 
\"adl://foo_store.azuredatalakestore.net/path/to/file\")", "MagicMock() self.fakeFile.__iter__.return_value = [b\"a\", b\"b\", b\"c\"] self.fakeFile.__enter__.return_value = self.fakeFile self.open", "self.assertEqual(path, \"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\", \"baz\"], )", "MagicMock class ADLTest(unittest.TestCase): \"\"\" Tests for `ADL` \"\"\" def setUp(self):", "\"bar\", \"baz\"], ) self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\",", "test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception) as context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid ADL url 'this_is_not_a_valid_url'\"", "\"c\"] ) self.fakeFile.__iter__.assert_called_once_with() def test_write_opens_file_and_writes_to_it(self): self.adl.write(\"hello world\", \"adl://foo_store.azuredatalakestore.net/path/to/file\") self.fakeFile.write.assert_called_once_with(b\"hello world\")", "self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\", \"baz\"], ) self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self): self.assertEquals(", "url 'this_is_not_a_valid_url'\" in str(context.exception)) def test_split_url_splits_valid_url(self): (store_name, path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\")", "six.PY3: from unittest.mock import Mock, MagicMock else: from mock import", "self.adl = ADL() self.adl._create_adapter = Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception)", "else: from mock import Mock, MagicMock class ADLTest(unittest.TestCase): \"\"\" Tests", 
"ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\") self.assertEqual(path, \"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\",", "self.fakeFile self.open = Mock(return_value=self.fakeFile) self.fakeAdapter = Mock(open=self.open, ls=self.ls) self.adl =", "Mock(return_value=[\"foo\", \"bar\", \"baz\"]) self.fakeFile = MagicMock() self.fakeFile.__iter__.return_value = [b\"a\", b\"b\",", "\"\"\" def setUp(self): self.ls = Mock(return_value=[\"foo\", \"bar\", \"baz\"]) self.fakeFile =", "class ADLTest(unittest.TestCase): \"\"\" Tests for `ADL` \"\"\" def setUp(self): self.ls", "\"foo\") self.assertEqual(path, \"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\", \"baz\"],", "\"baz\"]) self.fakeFile = MagicMock() self.fakeFile.__iter__.return_value = [b\"a\", b\"b\", b\"c\"] self.fakeFile.__enter__.return_value", "import Mock, MagicMock else: from mock import Mock, MagicMock class", ") self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\", \"c\"] )", "self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\", \"c\"] ) self.fakeFile.__iter__.assert_called_once_with()", "[b\"a\", b\"b\", b\"c\"] self.fakeFile.__enter__.return_value = self.fakeFile self.open = Mock(return_value=self.fakeFile) self.fakeAdapter", "self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\", \"c\"] ) self.fakeFile.__iter__.assert_called_once_with() def test_write_opens_file_and_writes_to_it(self): 
self.adl.write(\"hello world\",", "Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception) as context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid ADL", "def test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"a\", \"b\", \"c\"] ) self.fakeFile.__iter__.assert_called_once_with() def", "test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\", \"baz\"], ) self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self):", "= Mock(return_value=self.fakeFile) self.fakeAdapter = Mock(open=self.open, ls=self.ls) self.adl = ADL() self.adl._create_adapter", "for `ADL` \"\"\" def setUp(self): self.ls = Mock(return_value=[\"foo\", \"bar\", \"baz\"])", "import six if six.PY3: from unittest.mock import Mock, MagicMock else:", "self.assertEqual(store_name, \"foo\") self.assertEqual(path, \"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\",", "\"b\", \"c\"] ) self.fakeFile.__iter__.assert_called_once_with() def test_write_opens_file_and_writes_to_it(self): self.adl.write(\"hello world\", \"adl://foo_store.azuredatalakestore.net/path/to/file\") self.fakeFile.write.assert_called_once_with(b\"hello", "self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\", \"baz\"], ) self.ls.assert_called_once_with(\"path/to/file\") def test_read_opens_and_reads_file(self): self.assertEquals( self.adl.read(\"adl://foo_store.azuredatalakestore.net/path/to/file\"),", "def test_listdir_calls_ls_on_adl_adapter(self): self.assertEqual( self.adl.listdir(\"adl://foo_store.azuredatalakestore.net/path/to/file\"), [\"foo\", \"bar\", \"baz\"], ) 
self.ls.assert_called_once_with(\"path/to/file\") def", "unittest from ..adl import ADL import six if six.PY3: from", "Mock, MagicMock else: from mock import Mock, MagicMock class ADLTest(unittest.TestCase):", "(store_name, path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\") self.assertEqual(path, \"bar/baz\") def test_listdir_calls_ls_on_adl_adapter(self):", "..adl import ADL import six if six.PY3: from unittest.mock import", "import ADL import six if six.PY3: from unittest.mock import Mock,", "= Mock(return_value=self.fakeAdapter) def test_split_url_raises_exception_on_invalid_url(self): with self.assertRaises(Exception) as context: ADL._split_url(\"this_is_not_a_valid_url\") self.assertTrue(\"Invalid", "self.fakeFile = MagicMock() self.fakeFile.__iter__.return_value = [b\"a\", b\"b\", b\"c\"] self.fakeFile.__enter__.return_value =", "def test_split_url_splits_valid_url(self): (store_name, path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\") self.assertEqual(path, \"bar/baz\")", "test_split_url_splits_valid_url(self): (store_name, path) = ADL._split_url(\"adl://foo.azuredatalakestore.net/bar/baz\") self.assertEqual(store_name, \"foo\") self.assertEqual(path, \"bar/baz\") def", "= [b\"a\", b\"b\", b\"c\"] self.fakeFile.__enter__.return_value = self.fakeFile self.open = Mock(return_value=self.fakeFile)" ]
[ "import CustomUserCreationForm class SignUpView(CreateView): form_class = CustomUserCreationForm success_url = reverse_lazy('login')", "django.views.generic import CreateView from django.urls import reverse_lazy from .forms import", "import reverse_lazy from .forms import CustomUserCreationForm class SignUpView(CreateView): form_class =", "SignUpView(CreateView): form_class = CustomUserCreationForm success_url = reverse_lazy('login') template_name = 'signup.html'", "from django.urls import reverse_lazy from .forms import CustomUserCreationForm class SignUpView(CreateView):", "import CreateView from django.urls import reverse_lazy from .forms import CustomUserCreationForm", "<reponame>AnvarKhan/django-python from django.views.generic import CreateView from django.urls import reverse_lazy from", ".forms import CustomUserCreationForm class SignUpView(CreateView): form_class = CustomUserCreationForm success_url =", "CustomUserCreationForm class SignUpView(CreateView): form_class = CustomUserCreationForm success_url = reverse_lazy('login') template_name", "from .forms import CustomUserCreationForm class SignUpView(CreateView): form_class = CustomUserCreationForm success_url", "class SignUpView(CreateView): form_class = CustomUserCreationForm success_url = reverse_lazy('login') template_name =", "from django.views.generic import CreateView from django.urls import reverse_lazy from .forms", "django.urls import reverse_lazy from .forms import CustomUserCreationForm class SignUpView(CreateView): form_class", "CreateView from django.urls import reverse_lazy from .forms import CustomUserCreationForm class", "reverse_lazy from .forms import CustomUserCreationForm class SignUpView(CreateView): form_class = CustomUserCreationForm" ]
[ "typing import * # noqa: F401, F403 except ImportError: from", "try: from typing import * # noqa: F401, F403 except", "from .typing_stubs import * # type: ignore # noqa: F401,", "* # noqa: F401, F403 except ImportError: from .typing_stubs import", "import * # noqa: F401, F403 except ImportError: from .typing_stubs", ".typing_stubs import * # type: ignore # noqa: F401, F403", "F403 except ImportError: from .typing_stubs import * # type: ignore", "ImportError: from .typing_stubs import * # type: ignore # noqa:", "from typing import * # noqa: F401, F403 except ImportError:", "except ImportError: from .typing_stubs import * # type: ignore #", "<filename>st3/package_util/compat/typing.py try: from typing import * # noqa: F401, F403", "# noqa: F401, F403 except ImportError: from .typing_stubs import *", "F401, F403 except ImportError: from .typing_stubs import * # type:", "noqa: F401, F403 except ImportError: from .typing_stubs import * #" ]
[ "not None: res = res + dropmask.float() * replacement return", "(e.g., any inputs to an LSTM layer). Given a batch", "= x.masked_fill(dropmask, 0) if replacement is not None: res =", "x.size()] dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask,", "class LockedDropout(nn.Module): \"\"\" A variant of dropout layer that consistently", "LSTM layer). Given a batch of embedded inputs, this layer", "res = x.masked_fill(dropmask, 0) if replacement is not None: res", "not self.batch_first: m = x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob)", "\"\"\" def __init__(self, dropprob, batch_first=True): super().__init__() self.dropprob = dropprob self.batch_first", "be a replacement id (usually set to be <UNK>). \"\"\"", "indices. \"\"\" if not self.training or self.dropprob == 0: return", "dropprob): super().__init__() self.dropprob = dropprob def forward(self, x, replacement=None): if", "consistently drops out the same parameters over time. Also known", "layer assumes the last dimension of the input to be", "sequence units (e.g., word sequence, char sequence, etc.). Given a", "indices, this layer randomly set some of them to be", "LockedDropout(nn.Module): \"\"\" A variant of dropout layer that consistently drops", "from the LockedDropout implementation in the flair library (https://github.com/zalandoresearch/flair). \"\"\"", "self.dropprob).expand_as(x) return mask * x def extra_repr(self): return 'p={}'.format(self.dropprob) class", "def __init__(self, dropprob, replacement_id): super().__init__() self.dropprob = dropprob self.replacement_id =", "input to be the hidden dimension of a unit. 
\"\"\"", "dropprob self.replacement_id = replacement_id def forward(self, x): \"\"\" :param: x", "dropprob, batch_first=True): super().__init__() self.dropprob = dropprob self.batch_first = batch_first def", "for input of sequence units (e.g., word sequence, char sequence,", "not self.training or self.dropprob == 0: return x if not", "requires_grad=False).bernoulli_(1 - self.dropprob) mask = m.div(1 - self.dropprob).expand_as(x) return mask", "(e.g., word sequence, char sequence, etc.). Given a sequence of", "a sequence of unit indices, this layer randomly set some", "\"\"\" :param: x must be a LongTensor of unit indices.", "if not self.training or self.dropprob == 0: return x if", "that consistently drops out the same parameters over time. Also", "out the same parameters over time. Also known as the", "def forward(self, x): if not self.training or self.dropprob == 0:", "= 1 dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res =", "to be a replacement state. Note that this layer assumes", "batch_first=True): super().__init__() self.dropprob = dropprob self.batch_first = batch_first def forward(self,", "else: m = x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) mask", "of the input to be the hidden dimension of a", "to an LSTM layer). Given a batch of embedded inputs,", "be the hidden dimension of a unit. \"\"\" def __init__(self,", "or self.dropprob == 0: return x masksize = [y for", "return res def extra_repr(self): return 'p={}'.format(self.dropprob) class LockedDropout(nn.Module): \"\"\" A", "= torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask, 0) if", "set to be <UNK>). 
\"\"\" def __init__(self, dropprob, replacement_id): super().__init__()", "0) if replacement is not None: res = res +", "m = x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) mask =", "= torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask, self.replacement_id) return", "__init__(self, dropprob): super().__init__() self.dropprob = dropprob def forward(self, x, replacement=None):", "as nn class WordDropout(nn.Module): \"\"\" A word dropout layer that's", "implementation in the flair library (https://github.com/zalandoresearch/flair). \"\"\" def __init__(self, dropprob,", "SequenceUnitDropout(nn.Module): \"\"\" A unit dropout layer that's designed for input", "x): \"\"\" :param: x must be a LongTensor of unit", "of a unit. \"\"\" def __init__(self, dropprob): super().__init__() self.dropprob =", "replacement=None): if not self.training or self.dropprob == 0: return x", "unit. \"\"\" def __init__(self, dropprob): super().__init__() self.dropprob = dropprob def", "self.dropprob == 0: return x if not self.batch_first: m =", "a replacement id (usually set to be <UNK>). \"\"\" def", "device=x.device) < self.dropprob res = x.masked_fill(dropmask, self.replacement_id) return res def", "in the flair library (https://github.com/zalandoresearch/flair). \"\"\" def __init__(self, dropprob, batch_first=True):", "that's designed for input of sequence units (e.g., word sequence,", "of unit indices. \"\"\" if not self.training or self.dropprob ==", "as the variational dropout. This implementation was modified from the", "= [y for y in x.size()] masksize[-1] = 1 dropmask", "x.size(1), x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) else: m = x.new_empty(x.size(0), 1,", "embedded inputs (e.g., any inputs to an LSTM layer). Given", "A word dropout layer that's designed for embedded inputs (e.g.,", "of them to be a replacement state. 
Note that this", "x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) mask = m.div(1 - self.dropprob).expand_as(x) return", "some of them to be a replacement state. Note that", "m = x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) else: m", "drops out the same parameters over time. Also known as", "m.div(1 - self.dropprob).expand_as(x) return mask * x def extra_repr(self): return", "units (e.g., word sequence, char sequence, etc.). Given a sequence", "\"\"\" A variant of dropout layer that consistently drops out", "self.dropprob res = x.masked_fill(dropmask, 0) if replacement is not None:", "unit indices, this layer randomly set some of them to", "<UNK>). \"\"\" def __init__(self, dropprob, replacement_id): super().__init__() self.dropprob = dropprob", "= dropprob def forward(self, x, replacement=None): if not self.training or", "super().__init__() self.dropprob = dropprob self.replacement_id = replacement_id def forward(self, x):", "sequence, etc.). Given a sequence of unit indices, this layer", "import torch import torch.nn as nn class WordDropout(nn.Module): \"\"\" A", "layer randomly set some of them to be a replacement", "Also known as the variational dropout. 
This implementation was modified", "self.dropprob res = x.masked_fill(dropmask, self.replacement_id) return res def extra_repr(self): return", "self.dropprob = dropprob def forward(self, x, replacement=None): if not self.training", "of embedded inputs, this layer randomly set some of them", "* replacement return res def extra_repr(self): return 'p={}'.format(self.dropprob) class LockedDropout(nn.Module):", "nn class WordDropout(nn.Module): \"\"\" A word dropout layer that's designed", "in x.size()] masksize[-1] = 1 dropmask = torch.rand(*masksize, device=x.device) <", "dropout layer that consistently drops out the same parameters over", "if not self.batch_first: m = x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1 -", "self.replacement_id = replacement_id def forward(self, x): \"\"\" :param: x must", "the hidden dimension of a unit. \"\"\" def __init__(self, dropprob):", "def __init__(self, dropprob): super().__init__() self.dropprob = dropprob def forward(self, x,", "x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) mask = m.div(1 -", "batch of embedded inputs, this layer randomly set some of", "id (usually set to be <UNK>). \"\"\" def __init__(self, dropprob,", "library (https://github.com/zalandoresearch/flair). \"\"\" def __init__(self, dropprob, batch_first=True): super().__init__() self.dropprob =", "x, replacement=None): if not self.training or self.dropprob == 0: return", "1 dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask,", "res def extra_repr(self): return 'p={}'.format(self.dropprob) class LockedDropout(nn.Module): \"\"\" A variant", "= dropprob self.replacement_id = replacement_id def forward(self, x): \"\"\" :param:", "them to be a replacement id (usually set to be", "inputs (e.g., any inputs to an LSTM layer). Given a", "the LockedDropout implementation in the flair library (https://github.com/zalandoresearch/flair). 
\"\"\" def", "forward(self, x): if not self.training or self.dropprob == 0: return", "super().__init__() self.dropprob = dropprob def forward(self, x, replacement=None): if not", "unit indices. \"\"\" if not self.training or self.dropprob == 0:", "return 'p={}'.format(self.dropprob) class LockedDropout(nn.Module): \"\"\" A variant of dropout layer", "= [y for y in x.size()] dropmask = torch.rand(*masksize, device=x.device)", "or self.dropprob == 0: return x if not self.batch_first: m", "__init__(self, dropprob, replacement_id): super().__init__() self.dropprob = dropprob self.replacement_id = replacement_id", "torch.nn as nn class WordDropout(nn.Module): \"\"\" A word dropout layer", "self.batch_first = batch_first def forward(self, x): if not self.training or", "a LongTensor of unit indices. \"\"\" if not self.training or", "self.dropprob) mask = m.div(1 - self.dropprob).expand_as(x) return mask * x", "res = x.masked_fill(dropmask, self.replacement_id) return res def extra_repr(self): return 'p={},", "word sequence, char sequence, etc.). Given a sequence of unit", "def forward(self, x): \"\"\" :param: x must be a LongTensor", "last dimension of the input to be the hidden dimension", "sequence of unit indices, this layer randomly set some of", "be a replacement state. Note that this layer assumes the", "the same parameters over time. Also known as the variational", "\"\"\" A word dropout layer that's designed for embedded inputs", "inputs, this layer randomly set some of them to be", "\"\"\" A unit dropout layer that's designed for input of", "(usually set to be <UNK>). 
\"\"\" def __init__(self, dropprob, replacement_id):", "if replacement is not None: res = res + dropmask.float()", "- self.dropprob) mask = m.div(1 - self.dropprob).expand_as(x) return mask *", "masksize = [y for y in x.size()] dropmask = torch.rand(*masksize,", "in x.size()] dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res =", "torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask, self.replacement_id) return res", "(https://github.com/zalandoresearch/flair). \"\"\" def __init__(self, dropprob, batch_first=True): super().__init__() self.dropprob = dropprob", "y in x.size()] masksize[-1] = 1 dropmask = torch.rand(*masksize, device=x.device)", "to be a replacement id (usually set to be <UNK>).", "dropout layer that's designed for embedded inputs (e.g., any inputs", "dimension of the input to be the hidden dimension of", "0: return x masksize = [y for y in x.size()]", "res = res + dropmask.float() * replacement return res def", "an LSTM layer). Given a batch of embedded inputs, this", "= m.div(1 - self.dropprob).expand_as(x) return mask * x def extra_repr(self):", "for embedded inputs (e.g., any inputs to an LSTM layer).", "< self.dropprob res = x.masked_fill(dropmask, self.replacement_id) return res def extra_repr(self):", "dropprob self.batch_first = batch_first def forward(self, x): if not self.training", "x def extra_repr(self): return 'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module): \"\"\" A unit", "time. Also known as the variational dropout. 
This implementation was", "def forward(self, x, replacement=None): if not self.training or self.dropprob ==", "== 0: return x masksize = [y for y in", "Note that this layer assumes the last dimension of the", "[y for y in x.size()] masksize[-1] = 1 dropmask =", "= res + dropmask.float() * replacement return res def extra_repr(self):", "that's designed for embedded inputs (e.g., any inputs to an", "= x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) else: m =", "+ dropmask.float() * replacement return res def extra_repr(self): return 'p={}'.format(self.dropprob)", "dropout. This implementation was modified from the LockedDropout implementation in", "def extra_repr(self): return 'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module): \"\"\" A unit dropout", "0: return x if not self.batch_first: m = x.new_empty(1, x.size(1),", "'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module): \"\"\" A unit dropout layer that's designed", "replacement_id def forward(self, x): \"\"\" :param: x must be a", "known as the variational dropout. This implementation was modified from", "be a LongTensor of unit indices. \"\"\" if not self.training", "<reponame>rasimuvaikas/stanza import torch import torch.nn as nn class WordDropout(nn.Module): \"\"\"", "A variant of dropout layer that consistently drops out the", "= dropprob self.batch_first = batch_first def forward(self, x): if not", "assumes the last dimension of the input to be the", "class SequenceUnitDropout(nn.Module): \"\"\" A unit dropout layer that's designed for", "x must be a LongTensor of unit indices. \"\"\" if", "parameters over time. Also known as the variational dropout. 
This", "= x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) mask = m.div(1", "super().__init__() self.dropprob = dropprob self.batch_first = batch_first def forward(self, x):", "[y for y in x.size()] dropmask = torch.rand(*masksize, device=x.device) <", "y in x.size()] dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res", "device=x.device) < self.dropprob res = x.masked_fill(dropmask, 0) if replacement is", "self.training or self.dropprob == 0: return x masksize = [y", "self.training or self.dropprob == 0: return x if not self.batch_first:", "LongTensor of unit indices. \"\"\" if not self.training or self.dropprob", "that this layer assumes the last dimension of the input", "WordDropout(nn.Module): \"\"\" A word dropout layer that's designed for embedded", "designed for input of sequence units (e.g., word sequence, char", "set some of them to be a replacement state. Note", "replacement_id): super().__init__() self.dropprob = dropprob self.replacement_id = replacement_id def forward(self,", "return x masksize = [y for y in x.size()] dropmask", "to be the hidden dimension of a unit. \"\"\" def", "replacement id (usually set to be <UNK>). \"\"\" def __init__(self,", "of them to be a replacement id (usually set to", "state. Note that this layer assumes the last dimension of", "self.dropprob == 0: return x masksize = [y for y", "batch_first def forward(self, x): if not self.training or self.dropprob ==", "to be <UNK>). \"\"\" def __init__(self, dropprob, replacement_id): super().__init__() self.dropprob", "of unit indices, this layer randomly set some of them", "the variational dropout. This implementation was modified from the LockedDropout", "class WordDropout(nn.Module): \"\"\" A word dropout layer that's designed for", "x): if not self.training or self.dropprob == 0: return x", "A unit dropout layer that's designed for input of sequence", "dimension of a unit. 
\"\"\" def __init__(self, dropprob): super().__init__() self.dropprob", "x.size()] masksize[-1] = 1 dropmask = torch.rand(*masksize, device=x.device) < self.dropprob", "dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask, 0)", "is not None: res = res + dropmask.float() * replacement", "flair library (https://github.com/zalandoresearch/flair). \"\"\" def __init__(self, dropprob, batch_first=True): super().__init__() self.dropprob", "sequence, char sequence, etc.). Given a sequence of unit indices,", "this layer assumes the last dimension of the input to", "x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) else: m = x.new_empty(x.size(0), 1, x.size(2),", "dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask, self.replacement_id)", "not self.training or self.dropprob == 0: return x masksize =", "dropprob, replacement_id): super().__init__() self.dropprob = dropprob self.replacement_id = replacement_id def", "__init__(self, dropprob, batch_first=True): super().__init__() self.dropprob = dropprob self.batch_first = batch_first", ":param: x must be a LongTensor of unit indices. 
\"\"\"", "- self.dropprob) else: m = x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1 -", "= x.masked_fill(dropmask, self.replacement_id) return res def extra_repr(self): return 'p={}, replacement_id={}'.format(self.dropprob,", "this layer randomly set some of them to be a", "This implementation was modified from the LockedDropout implementation in the", "extra_repr(self): return 'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module): \"\"\" A unit dropout layer", "< self.dropprob res = x.masked_fill(dropmask, 0) if replacement is not", "None: res = res + dropmask.float() * replacement return res", "x.masked_fill(dropmask, 0) if replacement is not None: res = res", "dropmask.float() * replacement return res def extra_repr(self): return 'p={}'.format(self.dropprob) class", "the last dimension of the input to be the hidden", "return 'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module): \"\"\" A unit dropout layer that's", "unit dropout layer that's designed for input of sequence units", "x.masked_fill(dropmask, self.replacement_id) return res def extra_repr(self): return 'p={}, replacement_id={}'.format(self.dropprob, self.replacement_id)", "mask = m.div(1 - self.dropprob).expand_as(x) return mask * x def", "= replacement_id def forward(self, x): \"\"\" :param: x must be", "import torch.nn as nn class WordDropout(nn.Module): \"\"\" A word dropout", "replacement return res def extra_repr(self): return 'p={}'.format(self.dropprob) class LockedDropout(nn.Module): \"\"\"", "x masksize = [y for y in x.size()] masksize[-1] =", "masksize[-1] = 1 dropmask = torch.rand(*masksize, device=x.device) < self.dropprob res", "must be a LongTensor of unit indices. \"\"\" if not", "the flair library (https://github.com/zalandoresearch/flair). 
\"\"\" def __init__(self, dropprob, batch_first=True): super().__init__()", "if not self.training or self.dropprob == 0: return x masksize", "Given a sequence of unit indices, this layer randomly set", "forward(self, x): \"\"\" :param: x must be a LongTensor of", "* x def extra_repr(self): return 'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module): \"\"\" A", "be <UNK>). \"\"\" def __init__(self, dropprob, replacement_id): super().__init__() self.dropprob =", "return x masksize = [y for y in x.size()] masksize[-1]", "return mask * x def extra_repr(self): return 'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module):", "hidden dimension of a unit. \"\"\" def __init__(self, dropprob): super().__init__()", "'p={}'.format(self.dropprob) class LockedDropout(nn.Module): \"\"\" A variant of dropout layer that", "res + dropmask.float() * replacement return res def extra_repr(self): return", "variational dropout. This implementation was modified from the LockedDropout implementation", "set some of them to be a replacement id (usually", "them to be a replacement state. Note that this layer", "any inputs to an LSTM layer). Given a batch of", "replacement state. Note that this layer assumes the last dimension", "a unit. \"\"\" def __init__(self, dropprob): super().__init__() self.dropprob = dropprob", "of sequence units (e.g., word sequence, char sequence, etc.). 
Given", "embedded inputs, this layer randomly set some of them to", "x masksize = [y for y in x.size()] dropmask =", "some of them to be a replacement id (usually set", "layer that's designed for input of sequence units (e.g., word", "a batch of embedded inputs, this layer randomly set some", "\"\"\" def __init__(self, dropprob, replacement_id): super().__init__() self.dropprob = dropprob self.replacement_id", "self.batch_first: m = x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) else:", "masksize = [y for y in x.size()] masksize[-1] = 1", "of dropout layer that consistently drops out the same parameters", "== 0: return x if not self.batch_first: m = x.new_empty(1,", "self.dropprob) else: m = x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob)", "x if not self.batch_first: m = x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1", "variant of dropout layer that consistently drops out the same", "x.new_empty(1, x.size(1), x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) else: m = x.new_empty(x.size(0),", "1, x.size(2), requires_grad=False).bernoulli_(1 - self.dropprob) mask = m.div(1 - self.dropprob).expand_as(x)", "randomly set some of them to be a replacement id", "torch.rand(*masksize, device=x.device) < self.dropprob res = x.masked_fill(dropmask, 0) if replacement", "def extra_repr(self): return 'p={}'.format(self.dropprob) class LockedDropout(nn.Module): \"\"\" A variant of", "over time. Also known as the variational dropout. This implementation", "self.dropprob = dropprob self.batch_first = batch_first def forward(self, x): if", "the input to be the hidden dimension of a unit.", "\"\"\" if not self.training or self.dropprob == 0: return x", "a replacement state. Note that this layer assumes the last", "char sequence, etc.). 
Given a sequence of unit indices, this", "replacement is not None: res = res + dropmask.float() *", "for y in x.size()] dropmask = torch.rand(*masksize, device=x.device) < self.dropprob", "input of sequence units (e.g., word sequence, char sequence, etc.).", "mask * x def extra_repr(self): return 'p={}'.format(self.dropprob) class SequenceUnitDropout(nn.Module): \"\"\"", "layer that consistently drops out the same parameters over time.", "return x if not self.batch_first: m = x.new_empty(1, x.size(1), x.size(2),", "dropout layer that's designed for input of sequence units (e.g.,", "same parameters over time. Also known as the variational dropout.", "word dropout layer that's designed for embedded inputs (e.g., any", "LockedDropout implementation in the flair library (https://github.com/zalandoresearch/flair). \"\"\" def __init__(self,", "inputs to an LSTM layer). Given a batch of embedded", "for y in x.size()] masksize[-1] = 1 dropmask = torch.rand(*masksize,", "was modified from the LockedDropout implementation in the flair library", "torch import torch.nn as nn class WordDropout(nn.Module): \"\"\" A word", "implementation was modified from the LockedDropout implementation in the flair", "dropprob def forward(self, x, replacement=None): if not self.training or self.dropprob", "- self.dropprob).expand_as(x) return mask * x def extra_repr(self): return 'p={}'.format(self.dropprob)", "requires_grad=False).bernoulli_(1 - self.dropprob) else: m = x.new_empty(x.size(0), 1, x.size(2), requires_grad=False).bernoulli_(1", "randomly set some of them to be a replacement state.", "def __init__(self, dropprob, batch_first=True): super().__init__() self.dropprob = dropprob self.batch_first =", "Given a batch of embedded inputs, this layer randomly set", "extra_repr(self): return 'p={}'.format(self.dropprob) class LockedDropout(nn.Module): \"\"\" A variant of dropout", "\"\"\" def __init__(self, dropprob): super().__init__() self.dropprob = dropprob def forward(self,", "designed 
for embedded inputs (e.g., any inputs to an LSTM", "layer). Given a batch of embedded inputs, this layer randomly", "etc.). Given a sequence of unit indices, this layer randomly", "self.dropprob = dropprob self.replacement_id = replacement_id def forward(self, x): \"\"\"", "forward(self, x, replacement=None): if not self.training or self.dropprob == 0:", "= batch_first def forward(self, x): if not self.training or self.dropprob", "modified from the LockedDropout implementation in the flair library (https://github.com/zalandoresearch/flair).", "layer that's designed for embedded inputs (e.g., any inputs to" ]
[ "'rb') as f: reader = PdfFileReader(f, strict=False) print(reader.numPages) if reader.isEncrypted:", "= PdfFileReader(f, strict=False) print(reader.numPages) if reader.isEncrypted: reader.decrypt('') current_page = reader.getPage(5)", "2018-03-26 \"\"\" from PyPDF2 import PdfFileReader with open('./res/Python课程大纲.pdf', 'rb') as", "PdfFileReader with open('./res/Python课程大纲.pdf', 'rb') as f: reader = PdfFileReader(f, strict=False)", "as f: reader = PdfFileReader(f, strict=False) print(reader.numPages) if reader.isEncrypted: reader.decrypt('')", "BDFD Date: 2018-03-26 \"\"\" from PyPDF2 import PdfFileReader with open('./res/Python课程大纲.pdf',", "strict=False) print(reader.numPages) if reader.isEncrypted: reader.decrypt('') current_page = reader.getPage(5) print(current_page) print(current_page.extractText())", "0.1 Author: BDFD Date: 2018-03-26 \"\"\" from PyPDF2 import PdfFileReader", "Author: BDFD Date: 2018-03-26 \"\"\" from PyPDF2 import PdfFileReader with", "import PdfFileReader with open('./res/Python课程大纲.pdf', 'rb') as f: reader = PdfFileReader(f,", "PdfFileReader(f, strict=False) print(reader.numPages) if reader.isEncrypted: reader.decrypt('') current_page = reader.getPage(5) print(current_page)", "Date: 2018-03-26 \"\"\" from PyPDF2 import PdfFileReader with open('./res/Python课程大纲.pdf', 'rb')", "open('./res/Python课程大纲.pdf', 'rb') as f: reader = PdfFileReader(f, strict=False) print(reader.numPages) if", "<gh_stars>1-10 \"\"\" 读取PDF文件 Version: 0.1 Author: BDFD Date: 2018-03-26 \"\"\"", "\"\"\" from PyPDF2 import PdfFileReader with open('./res/Python课程大纲.pdf', 'rb') as f:", "with open('./res/Python课程大纲.pdf', 'rb') as f: reader = PdfFileReader(f, strict=False) print(reader.numPages)", "Version: 0.1 Author: BDFD Date: 2018-03-26 \"\"\" from PyPDF2 import", "\"\"\" 读取PDF文件 Version: 0.1 Author: BDFD Date: 2018-03-26 \"\"\" from", "f: reader = PdfFileReader(f, strict=False) print(reader.numPages) if reader.isEncrypted: reader.decrypt('') current_page", "读取PDF文件 
Version: 0.1 Author: BDFD Date: 2018-03-26 \"\"\" from PyPDF2", "PyPDF2 import PdfFileReader with open('./res/Python课程大纲.pdf', 'rb') as f: reader =", "reader = PdfFileReader(f, strict=False) print(reader.numPages) if reader.isEncrypted: reader.decrypt('') current_page =", "from PyPDF2 import PdfFileReader with open('./res/Python课程大纲.pdf', 'rb') as f: reader" ]
[ "'ipetrash' from PyQt5 import QtWidgets as qtw from PyQt5.QtTest import", "def fill_tree(self, global_number=-1): self.number_total_class = 0 self.tree.clear() t = time.clock()", "sec'.format(self.number_total_class, time.clock() - t) ) def closeEvent(self, e): quit() if", "import QTest import time import requests from bs4 import BeautifulSoup", "== '__main__': app = qtw.QApplication([]) w = MainWindow() w.resize(500, 500)", "return QTest.qWait(1000) indent = ' ' * indent_level rs =", "root = BeautifulSoup(rs.content, 'html.parser') name_class = root.select_one('.context > .title').text.split()[0] inherited_children", "= len(inherited_children) if number_inherited_children > 0: name_class = '{} ({})'.format(name_class,", "= qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class = 0 def _fill_root(self,", "= 0 def _fill_root(self, node: qtw.QTreeWidgetItem, url: str, global_number: int,", "and self.number_total_class >= global_number: return QTest.qWait(1000) indent = ' '", "super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree = qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class =", "= '{} ({})'.format(name_class, number_inherited_children) print(indent + name_class + ':') else:", "closeEvent(self, e): quit() if __name__ == '__main__': app = qtw.QApplication([])", "indent_level=0): if global_number > 0 and self.number_total_class >= global_number: return", "1) def fill_tree(self, global_number=-1): self.number_total_class = 0 self.tree.clear() t =", "node.setExpanded(True) self.number_total_class += 1 for name, url in inherited_children: self._fill_root(item,", "int, indent_level=0): if global_number > 0 and self.number_total_class >= global_number:", "+ 1) def fill_tree(self, global_number=-1): 
self.number_total_class = 0 self.tree.clear() t", "indent_level rs = requests.get(url) root = BeautifulSoup(rs.content, 'html.parser') name_class =", "> .title').text.split()[0] inherited_children = get_inherited_children(url, root) number_inherited_children = len(inherited_children) if", "{}.\\nElapsed: {:.3f} sec'.format(self.number_total_class, time.clock() - t) ) def closeEvent(self, e):", "QTest.qWait(1000) indent = ' ' * indent_level rs = requests.get(url)", "print(indent + name_class + ':') else: print(indent + name_class) item", "QTest import time import requests from bs4 import BeautifulSoup from", "= 0 self.tree.clear() t = time.clock() self._fill_root(None, ROOT_URL, global_number) qtw.QMessageBox.information(", "url, global_number, indent_level + 1) def fill_tree(self, global_number=-1): self.number_total_class =", "root.select_one('.context > .title').text.split()[0] inherited_children = get_inherited_children(url, root) number_inherited_children = len(inherited_children)", "self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class = 0 def _fill_root(self, node: qtw.QTreeWidgetItem,", "url: str, global_number: int, indent_level=0): if global_number > 0 and", "':') else: print(indent + name_class) item = qtw.QTreeWidgetItem([name_class]) if not", "QtWidgets as qtw from PyQt5.QtTest import QTest import time import", "- t) ) def closeEvent(self, e): quit() if __name__ ==", "import get_inherited_children, ROOT_URL class MainWindow(qtw.QMainWindow): def __init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree", "t = time.clock() self._fill_root(None, ROOT_URL, global_number) qtw.QMessageBox.information( self, 'Complete!', 'Items:", "qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class = 0 def _fill_root(self, node:", "qtw 
from PyQt5.QtTest import QTest import time import requests from", "global_number: return QTest.qWait(1000) indent = ' ' * indent_level rs", "self.setCentralWidget(self.tree) self.number_total_class = 0 def _fill_root(self, node: qtw.QTreeWidgetItem, url: str,", "from PyQt5.QtTest import QTest import time import requests from bs4", "= BeautifulSoup(rs.content, 'html.parser') name_class = root.select_one('.context > .title').text.split()[0] inherited_children =", "= get_inherited_children(url, root) number_inherited_children = len(inherited_children) if number_inherited_children > 0:", "= 'ipetrash' from PyQt5 import QtWidgets as qtw from PyQt5.QtTest", "* indent_level rs = requests.get(url) root = BeautifulSoup(rs.content, 'html.parser') name_class", "node: self.tree.addTopLevelItem(item) else: node.addChild(item) node.setExpanded(True) self.number_total_class += 1 for name,", "requests from bs4 import BeautifulSoup from console import get_inherited_children, ROOT_URL", "self._fill_root(item, url, global_number, indent_level + 1) def fill_tree(self, global_number=-1): self.number_total_class", "fill_tree(self, global_number=-1): self.number_total_class = 0 self.tree.clear() t = time.clock() self._fill_root(None,", "quit() if __name__ == '__main__': app = qtw.QApplication([]) w =", "+ name_class) item = qtw.QTreeWidgetItem([name_class]) if not node: self.tree.addTopLevelItem(item) else:", "self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree = qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class = 0", "= requests.get(url) root = BeautifulSoup(rs.content, 'html.parser') name_class = root.select_one('.context >", "time.clock() - t) ) def closeEvent(self, e): quit() if __name__", "qtw.QTreeWidgetItem, url: str, global_number: int, indent_level=0): if global_number > 0", "= root.select_one('.context > .title').text.split()[0] 
inherited_children = get_inherited_children(url, root) number_inherited_children =", "ROOT_URL class MainWindow(qtw.QMainWindow): def __init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree = qtw.QTreeWidget()", "global_number) qtw.QMessageBox.information( self, 'Complete!', 'Items: {}.\\nElapsed: {:.3f} sec'.format(self.number_total_class, time.clock() -", "qtw.QMessageBox.information( self, 'Complete!', 'Items: {}.\\nElapsed: {:.3f} sec'.format(self.number_total_class, time.clock() - t)", "MainWindow(qtw.QMainWindow): def __init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree = qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME')", "'__main__': app = qtw.QApplication([]) w = MainWindow() w.resize(500, 500) w.show()", "'{} ({})'.format(name_class, number_inherited_children) print(indent + name_class + ':') else: print(indent", "_fill_root(self, node: qtw.QTreeWidgetItem, url: str, global_number: int, indent_level=0): if global_number", "__author__ = 'ipetrash' from PyQt5 import QtWidgets as qtw from", "0 def _fill_root(self, node: qtw.QTreeWidgetItem, url: str, global_number: int, indent_level=0):", "class MainWindow(qtw.QMainWindow): def __init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree = qtw.QTreeWidget() self.tree.setAlternatingRowColors(True)", "= qtw.QApplication([]) w = MainWindow() w.resize(500, 500) w.show() w.fill_tree() app.exec()", "self.number_total_class >= global_number: return QTest.qWait(1000) indent = ' ' *", "self.tree.clear() t = time.clock() self._fill_root(None, ROOT_URL, global_number) qtw.QMessageBox.information( self, 'Complete!',", "'html.parser') name_class = root.select_one('.context > .title').text.split()[0] inherited_children = get_inherited_children(url, root)", "if not 
node: self.tree.addTopLevelItem(item) else: node.addChild(item) node.setExpanded(True) self.number_total_class += 1", "self._fill_root(None, ROOT_URL, global_number) qtw.QMessageBox.information( self, 'Complete!', 'Items: {}.\\nElapsed: {:.3f} sec'.format(self.number_total_class,", "app = qtw.QApplication([]) w = MainWindow() w.resize(500, 500) w.show() w.fill_tree()", "0 self.tree.clear() t = time.clock() self._fill_root(None, ROOT_URL, global_number) qtw.QMessageBox.information( self,", "if __name__ == '__main__': app = qtw.QApplication([]) w = MainWindow()", "self.number_total_class = 0 self.tree.clear() t = time.clock() self._fill_root(None, ROOT_URL, global_number)", "{:.3f} sec'.format(self.number_total_class, time.clock() - t) ) def closeEvent(self, e): quit()", "name_class + ':') else: print(indent + name_class) item = qtw.QTreeWidgetItem([name_class])", "time import requests from bs4 import BeautifulSoup from console import", "+= 1 for name, url in inherited_children: self._fill_root(item, url, global_number,", "name_class = '{} ({})'.format(name_class, number_inherited_children) print(indent + name_class + ':')", "inherited_children: self._fill_root(item, url, global_number, indent_level + 1) def fill_tree(self, global_number=-1):", "get_inherited_children, ROOT_URL class MainWindow(qtw.QMainWindow): def __init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree =", "str, global_number: int, indent_level=0): if global_number > 0 and self.number_total_class", "get_inherited_children(url, root) number_inherited_children = len(inherited_children) if number_inherited_children > 0: name_class", "python3 # -*- coding: utf-8 -*- __author__ = 'ipetrash' from", "-*- coding: utf-8 -*- __author__ = 'ipetrash' from PyQt5 import", "if global_number > 0 and self.number_total_class >= global_number: return QTest.qWait(1000)", "> 0 and self.number_total_class >= global_number: return QTest.qWait(1000) 
indent =", "__init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree = qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class", "in inherited_children: self._fill_root(item, url, global_number, indent_level + 1) def fill_tree(self,", "self.tree.addTopLevelItem(item) else: node.addChild(item) node.setExpanded(True) self.number_total_class += 1 for name, url", ">= global_number: return QTest.qWait(1000) indent = ' ' * indent_level", "self.tree = qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class = 0 def", "name_class) item = qtw.QTreeWidgetItem([name_class]) if not node: self.tree.addTopLevelItem(item) else: node.addChild(item)", "1 for name, url in inherited_children: self._fill_root(item, url, global_number, indent_level", "else: print(indent + name_class) item = qtw.QTreeWidgetItem([name_class]) if not node:", "url in inherited_children: self._fill_root(item, url, global_number, indent_level + 1) def", "len(inherited_children) if number_inherited_children > 0: name_class = '{} ({})'.format(name_class, number_inherited_children)", "from console import get_inherited_children, ROOT_URL class MainWindow(qtw.QMainWindow): def __init__(self): super().__init__()", "global_number: int, indent_level=0): if global_number > 0 and self.number_total_class >=", "item = qtw.QTreeWidgetItem([name_class]) if not node: self.tree.addTopLevelItem(item) else: node.addChild(item) node.setExpanded(True)", "time.clock() self._fill_root(None, ROOT_URL, global_number) qtw.QMessageBox.information( self, 'Complete!', 'Items: {}.\\nElapsed: {:.3f}", "print(indent + name_class) item = qtw.QTreeWidgetItem([name_class]) if not node: self.tree.addTopLevelItem(item)", "number_inherited_children = len(inherited_children) if 
number_inherited_children > 0: name_class = '{}", "node: qtw.QTreeWidgetItem, url: str, global_number: int, indent_level=0): if global_number >", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = 'ipetrash'", "rs = requests.get(url) root = BeautifulSoup(rs.content, 'html.parser') name_class = root.select_one('.context", "e): quit() if __name__ == '__main__': app = qtw.QApplication([]) w", "self.number_total_class = 0 def _fill_root(self, node: qtw.QTreeWidgetItem, url: str, global_number:", "= time.clock() self._fill_root(None, ROOT_URL, global_number) qtw.QMessageBox.information( self, 'Complete!', 'Items: {}.\\nElapsed:", "self.number_total_class += 1 for name, url in inherited_children: self._fill_root(item, url,", "'Items: {}.\\nElapsed: {:.3f} sec'.format(self.number_total_class, time.clock() - t) ) def closeEvent(self,", "> 0: name_class = '{} ({})'.format(name_class, number_inherited_children) print(indent + name_class", "__name__ == '__main__': app = qtw.QApplication([]) w = MainWindow() w.resize(500,", "if number_inherited_children > 0: name_class = '{} ({})'.format(name_class, number_inherited_children) print(indent", "number_inherited_children) print(indent + name_class + ':') else: print(indent + name_class)", "requests.get(url) root = BeautifulSoup(rs.content, 'html.parser') name_class = root.select_one('.context > .title').text.split()[0]", "t) ) def closeEvent(self, e): quit() if __name__ == '__main__':", "BeautifulSoup(rs.content, 'html.parser') name_class = root.select_one('.context > .title').text.split()[0] inherited_children = get_inherited_children(url,", "<reponame>DazEB2/SimplePyScripts<gh_stars>0 #!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ =", "for name, url in inherited_children: self._fill_root(item, url, global_number, indent_level +", "bs4 import BeautifulSoup from console import get_inherited_children, ROOT_URL class MainWindow(qtw.QMainWindow):", "BeautifulSoup from console import get_inherited_children, 
ROOT_URL class MainWindow(qtw.QMainWindow): def __init__(self):", "({})'.format(name_class, number_inherited_children) print(indent + name_class + ':') else: print(indent +", "-*- __author__ = 'ipetrash' from PyQt5 import QtWidgets as qtw", "name_class = root.select_one('.context > .title').text.split()[0] inherited_children = get_inherited_children(url, root) number_inherited_children", "' * indent_level rs = requests.get(url) root = BeautifulSoup(rs.content, 'html.parser')", "root) number_inherited_children = len(inherited_children) if number_inherited_children > 0: name_class =", "+ name_class + ':') else: print(indent + name_class) item =", "= qtw.QTreeWidgetItem([name_class]) if not node: self.tree.addTopLevelItem(item) else: node.addChild(item) node.setExpanded(True) self.number_total_class", "ROOT_URL, global_number) qtw.QMessageBox.information( self, 'Complete!', 'Items: {}.\\nElapsed: {:.3f} sec'.format(self.number_total_class, time.clock()", "global_number > 0 and self.number_total_class >= global_number: return QTest.qWait(1000) indent", "not node: self.tree.addTopLevelItem(item) else: node.addChild(item) node.setExpanded(True) self.number_total_class += 1 for", "indent_level + 1) def fill_tree(self, global_number=-1): self.number_total_class = 0 self.tree.clear()", "qtw.QTreeWidgetItem([name_class]) if not node: self.tree.addTopLevelItem(item) else: node.addChild(item) node.setExpanded(True) self.number_total_class +=", "node.addChild(item) node.setExpanded(True) self.number_total_class += 1 for name, url in inherited_children:", "else: node.addChild(item) node.setExpanded(True) self.number_total_class += 1 for name, url in", "' ' * indent_level rs = requests.get(url) root = BeautifulSoup(rs.content,", "global_number, indent_level + 1) def fill_tree(self, global_number=-1): self.number_total_class = 0", "0 and self.number_total_class >= global_number: return QTest.qWait(1000) indent = '", "# -*- coding: utf-8 -*- __author__ = 'ipetrash' from PyQt5", "= 
' ' * indent_level rs = requests.get(url) root =", ".title').text.split()[0] inherited_children = get_inherited_children(url, root) number_inherited_children = len(inherited_children) if number_inherited_children", "global_number=-1): self.number_total_class = 0 self.tree.clear() t = time.clock() self._fill_root(None, ROOT_URL,", "import QtWidgets as qtw from PyQt5.QtTest import QTest import time", "0: name_class = '{} ({})'.format(name_class, number_inherited_children) print(indent + name_class +", "def _fill_root(self, node: qtw.QTreeWidgetItem, url: str, global_number: int, indent_level=0): if", "self, 'Complete!', 'Items: {}.\\nElapsed: {:.3f} sec'.format(self.number_total_class, time.clock() - t) )", "console import get_inherited_children, ROOT_URL class MainWindow(qtw.QMainWindow): def __init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io')", "PyQt5 import QtWidgets as qtw from PyQt5.QtTest import QTest import", "from PyQt5 import QtWidgets as qtw from PyQt5.QtTest import QTest", "import BeautifulSoup from console import get_inherited_children, ROOT_URL class MainWindow(qtw.QMainWindow): def", "utf-8 -*- __author__ = 'ipetrash' from PyQt5 import QtWidgets as", "import time import requests from bs4 import BeautifulSoup from console", "+ ':') else: print(indent + name_class) item = qtw.QTreeWidgetItem([name_class]) if", "name, url in inherited_children: self._fill_root(item, url, global_number, indent_level + 1)", "indent = ' ' * indent_level rs = requests.get(url) root", "PyQt5.QtTest import QTest import time import requests from bs4 import", "number_inherited_children > 0: name_class = '{} ({})'.format(name_class, number_inherited_children) print(indent +", "as qtw from PyQt5.QtTest import QTest import time import requests", "coding: utf-8 -*- __author__ = 'ipetrash' from PyQt5 import QtWidgets", "'Complete!', 'Items: {}.\\nElapsed: {:.3f} sec'.format(self.number_total_class, time.clock() - t) ) 
def", "inherited_children = get_inherited_children(url, root) number_inherited_children = len(inherited_children) if number_inherited_children >", "import requests from bs4 import BeautifulSoup from console import get_inherited_children,", "def closeEvent(self, e): quit() if __name__ == '__main__': app =", ") def closeEvent(self, e): quit() if __name__ == '__main__': app", "def __init__(self): super().__init__() self.setWindowTitle('qt__class_tree__parse_and_print__recursively__from__doc_qt_io') self.tree = qtw.QTreeWidget() self.tree.setAlternatingRowColors(True) self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree)", "self.tree.setHeaderLabel('NAME') self.setCentralWidget(self.tree) self.number_total_class = 0 def _fill_root(self, node: qtw.QTreeWidgetItem, url:", "from bs4 import BeautifulSoup from console import get_inherited_children, ROOT_URL class" ]
[ "Corp. # # # Licensed under the Apache License, Version", "in, # even though it actually doesn't. let's check to", "many users' logged in, # even though it actually doesn't.", "2.0 (the \"License\"); # you may not use this file", "form = \"form=2\" resp = self.submit(form, param) count = 0", "let's check to see if this is the case #", "\"Too many users\" in self.getpage(\"form=2\"): raise OpTestError(\"FSP reports 'Too many", "from .OpTestConstants import OpTestConstants as BMC_CONST from .OpTestError import OpTestError", "form = \"form=%s\" % self.frms['pwr'] return self.getpage(form) def start_debugvtty_session(self, partitionId='0',", "'33'} def getcsrf(self, form): while True: try: myurl = urllib.request.urlopen(self.url+form,", "OpTestError import http.cookiejar import urllib.request import urllib.parse import urllib.error import", "ASM page\") param = {'form': '16', 'exe': 'Execute', 'CSRF_TOKEN': '',", "as BMC_CONST from .OpTestError import OpTestError import http.cookiejar import urllib.request", "self.frms = {'pwr': '60', 'dbg': '79', 'immpwroff': '33'} def getcsrf(self,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "it actually doesn't. 
let's check to see if this is", "count += 1 if count == 2: print(msg) return False", "req = urllib.request.Request(self.url+form, data) return urllib.request.urlopen(req) def login(self): if not", "user:{0} and password:{1}\".format( self.user_name, self.password) print(msg) count += 1 if", "clearlogs(self): if not self.login(): raise OpTestError(\"Failed to login ASM page\")", "def ver(self): form = \"form=1\" return self.getpage(form) def execommand(self, cmd):", "'CSRF_TOKEN': ''} form = \"form=30\" self.submit(form, param) self.logout() def powerstat(self):", "'16', 'exe': 'Execute', 'CSRF_TOKEN': '', 'cmd': cmd} form = \"form=16&frm=0\"", "-------------------------------------------------- This class can contains common functions which are useful", "param = {'user': self.user_name, 'password': <PASSWORD>, 'login': 'Log in', 'lang':", "not len(self.cj) == 0: return True param = {'user': self.user_name,", "def enable_err_injct_policy(self): if not self.login(): raise OpTestError(\"Failed to login ASM", "because the FSP has 'too many users' logged in, #", "'Log in', 'lang': '0', 'CSRF_TOKEN': ''} form = \"form=2\" resp", "-F') self.logout() def clearlogs(self): if not self.login(): raise OpTestError(\"Failed to", "request. if \"Too many users\" in self.getpage(\"form=2\"): raise OpTestError(\"FSP reports", "Project # # Contributors Listed Below - COPYRIGHT 2017 #", "This class can contains common functions which are useful for", "use this file except in compliance with the License. #", "param) count = 0 while count < 2: if not", "http.cookiejar import urllib.request import urllib.parse import urllib.error import re import", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "partitionId='0', sessionId='0', sessionTimeout='600'): if not self.login(): raise OpTestError(\"Failed to login", "License. 
# You may obtain a copy of the License", "OpTestError(\"Failed to login ASM page\") param = {'form': '16', 'exe':", "and limitations under the License. # # IBM_PROLOG_END_TAG ''' OpTestASM:", "can contains common functions which are useful for FSP ASM", "under the License is distributed on an \"AS IS\" BASIS,", "out)[0] else: return '0' def getpage(self, form): myurl = urllib.request.urlopen(self.url+form,", "out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else: return '0' def getpage(self, form):", "if \"Too many users\" in self.getpage(\"form=2\"): raise OpTestError(\"FSP reports 'Too", "'CSRF_TOKEN': '', 'cmd': cmd} form = \"form=16&frm=0\" self.submit(form, param) def", "form): while True: try: myurl = urllib.request.urlopen(self.url+form, timeout=10) except urllib.error.URLError:", "out', 'CSRF_TOKEN': ''} form = \"form=1\" self.submit(form, param) def ver(self):", "'Execute', 'CSRF_TOKEN': '', 'cmd': cmd} form = \"form=16&frm=0\" self.submit(form, param)", "an automatically generated prolog. # # $Source: op-test-framework/common/OpTestASM.py $ #", "def start_debugvtty_session(self, partitionId='0', sessionId='0', sessionTimeout='600'): if not self.login(): raise OpTestError(\"Failed", "urllib.error import re import ssl class OpTestASM: def __init__(self, i_fspIP,", "International Business Machines Corp. # # # Licensed under the", "is the case # by trying a request. if \"Too", "many users\" in self.getpage(\"form=2\"): raise OpTestError(\"FSP reports 'Too many users',", "# Contributors Listed Below - COPYRIGHT 2017 # [+] International", "''} form = \"form=1\" self.submit(form, param) def ver(self): form =", "self.logout() def enable_err_injct_policy(self): if not self.login(): raise OpTestError(\"Failed to login", "\"form=1\" self.submit(form, param) def ver(self): form = \"form=1\" return self.getpage(form)", "IBM_PROLOG_BEGIN_TAG # This is an automatically generated prolog. 
# #", "0 while count < 2: if not len(self.cj) == 0:", "form = \"form=81\" self.submit(form, param) self.logout() def enable_err_injct_policy(self): if not", "to see if this is the case # by trying", "OF ANY KIND, either express or # implied. See the", "True def logout(self): param = {'submit': 'Log out', 'CSRF_TOKEN': ''}", "\"form=1\" return self.getpage(form) def execommand(self, cmd): if not self.login(): raise", "only accessible through the FSP Web UI (such as progress", "License. # # IBM_PROLOG_END_TAG ''' OpTestASM: Advanced System Management (FSP", "'78', 'immpwroff': '32'} else: self.hrdwr = 'p7' self.frms = {'pwr':", "'immpwroff': '33'} def getcsrf(self, form): while True: try: myurl =", "<PASSWORD> self.url = \"https://%s/cgi-bin/cgi?\" % self.host_name self.cj = http.cookiejar.CookieJar() context", "in compliance with the License. # You may obtain a", "self.ver(): self.hrdwr = 'p8' self.frms = {'pwr': '59', 'dbg': '78',", "raise OpTestError(\"Failed to login ASM page\") param = {'form': '56',", "self.submit(form, param) self.logout() def enable_err_injct_policy(self): if not self.login(): raise OpTestError(\"Failed", "count < 2: if not len(self.cj) == 0: break #", "software # distributed under the License is distributed on an", "permissions and limitations under the License. 
# # IBM_PROLOG_END_TAG '''", "time.sleep(10) self.submit(form, param) msg = \"Login failed with user:{0} and", "self.host_name self.cj = http.cookiejar.CookieJar() context = ssl.create_default_context() context.check_hostname = False", "= \"Login failed with user:{0} and password:{1}\".format( self.user_name, self.password) print(msg)", "'0' def getpage(self, form): myurl = urllib.request.urlopen(self.url+form, timeout=60) return myurl.read().decode(\"utf-8\")", "== 2: print(msg) return False return True def logout(self): param", "myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN' in out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else: return", "'32'} else: self.hrdwr = 'p7' self.frms = {'pwr': '60', 'dbg':", "import time import subprocess import os import pexpect import sys", "''} form = \"form=81\" self.submit(form, param) self.logout() def enable_err_injct_policy(self): if", "with user:{0} and password:{1}\".format( self.user_name, self.password) print(msg) count += 1", "= urllib.request.Request(self.url+form, data) return urllib.request.urlopen(req) def login(self): if not len(self.cj)", "# # IBM_PROLOG_END_TAG ''' OpTestASM: Advanced System Management (FSP Web", "self.host_name = i_fspIP self.user_name = i_fspUser self.password = <PASSWORD> self.url", "This is an automatically generated prolog. # # $Source: op-test-framework/common/OpTestASM.py", "this is the case # by trying a request. if", "getpage(self, form): myurl = urllib.request.urlopen(self.url+form, timeout=60) return myurl.read().decode(\"utf-8\") def submit(self,", "= i_fspIP self.user_name = i_fspUser self.password = <PASSWORD> self.url =", "self.logout() def powerstat(self): form = \"form=%s\" % self.frms['pwr'] return self.getpage(form)", "= {'user': self.user_name, 'password': <PASSWORD>, 'login': 'Log in', 'lang': '0',", "CONDITIONS OF ANY KIND, either express or # implied. 
See", "'p': '1', 'submit': 'Save settings', 'CSRF_TOKEN': ''} form = \"form=56\"", "check to see if this is the case # by", "page\") param = {'form': '16', 'exe': 'Execute', 'CSRF_TOKEN': '', 'cmd':", "import ssl class OpTestASM: def __init__(self, i_fspIP, i_fspUser, i_fspPasswd): self.host_name", ".OpTestError import OpTestError import http.cookiejar import urllib.request import urllib.parse import", "page\") param = {'form': '56', 'p': '1', 'submit': 'Save settings',", "self.getpage(form) def execommand(self, cmd): if not self.login(): raise OpTestError(\"Failed to", "urllib.error.URLError: time.sleep(2) continue break out = myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN' in", "2: print(msg) return False return True def logout(self): param =", "encoding=utf8 # IBM_PROLOG_BEGIN_TAG # This is an automatically generated prolog.", "myurl = urllib.request.urlopen(self.url+form, timeout=10) except urllib.error.URLError: time.sleep(2) continue break out", "def getcsrf(self, form): while True: try: myurl = urllib.request.urlopen(self.url+form, timeout=10)", "BMC_CONST from .OpTestError import OpTestError import http.cookiejar import urllib.request import", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "\"form=30\" self.submit(form, param) self.logout() def powerstat(self): form = \"form=%s\" %", "msg = \"Login failed with user:{0} and password:{1}\".format( self.user_name, self.password)", "to login ASM page\") param = {'form': '81', 'p': partitionId,", "FSP needs power cycle\") time.sleep(10) self.submit(form, param) msg = \"Login", "'', 'cmd': cmd} form = \"form=16&frm=0\" self.submit(form, param) def disablefirewall(self):", "to in writing, software # distributed under the License is", "ASM Web page. 
Some functionality is only accessible through the", "users', FSP needs power cycle\") time.sleep(10) self.submit(form, param) msg =", "0: break # the login can quietly fail because the", "or agreed to in writing, software # distributed under the", "express or # implied. See the License for the specific", "context.check_hostname = False context.verify_mode = ssl.CERT_NONE opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders", "required by applicable law or agreed to in writing, software", "param = {'form': '81', 'p': partitionId, 's': sessionId, 't': sessionTimeout,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "actually doesn't. let's check to see if this is the", "with the License. # You may obtain a copy of", "raise OpTestError(\"Failed to login ASM page\") self.execommand('iptables -F') self.logout() def", "== 0: return True param = {'user': self.user_name, 'password': <PASSWORD>,", "out = myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN' in out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0]", "login ASM page\") param = {'form': '81', 'p': partitionId, 's':", "limitations under the License. # # IBM_PROLOG_END_TAG ''' OpTestASM: Advanced", "def __init__(self, i_fspIP, i_fspUser, i_fspPasswd): self.host_name = i_fspIP self.user_name =", "param = {'form': '16', 'exe': 'Execute', 'CSRF_TOKEN': '', 'cmd': cmd}", "page\") self.execommand('iptables -F') self.logout() def clearlogs(self): if not self.login(): raise", "if not len(self.cj) == 0: break # the login can", "raise OpTestError(\"Failed to login ASM page\") param = {'form': '16',", "\"Clear all error/event log entries\", 'CSRF_TOKEN': ''} form = \"form=30\"", "fail because the FSP has 'too many users' logged in,", "except urllib.error.URLError: time.sleep(2) continue break out = myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN'", "compliance with the License. 
# You may obtain a copy", "self.getpage(form) def start_debugvtty_session(self, partitionId='0', sessionId='0', sessionTimeout='600'): if not self.login(): raise", "agreed to in writing, software # distributed under the License", "not self.login(): raise OpTestError(\"Failed to login ASM page\") param =", "% self.host_name self.cj = http.cookiejar.CookieJar() context = ssl.create_default_context() context.check_hostname =", "distributed under the License is distributed on an \"AS IS\"", "self.login(): raise OpTestError(\"Failed to login ASM page\") self.execommand('iptables -F') self.logout()", "self.setforms() def setforms(self): if \"FW860\" in self.ver(): self.hrdwr = 'p8'", "scrape it. ''' import time import subprocess import os import", "functions which are useful for FSP ASM Web page. Some", "IBM_PROLOG_END_TAG ''' OpTestASM: Advanced System Management (FSP Web UI) --------------------------------------------------", "except in compliance with the License. # You may obtain", "self.hrdwr = 'p7' self.frms = {'pwr': '60', 'dbg': '79', 'immpwroff':", "opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders = [('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms()", "try: myurl = urllib.request.urlopen(self.url+form, timeout=10) except urllib.error.URLError: time.sleep(2) continue break", "partitionId, 's': sessionId, 't': sessionTimeout, 'Save settings': 'Save settings', 'CSRF_TOKEN':", "import urllib.request import urllib.parse import urllib.error import re import ssl", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "urllib.request import urllib.parse import urllib.error import re import ssl class", "not use this file except in compliance with the License.", "can quietly fail because the FSP has 'too many users'", "login ASM page\") param = {'form': '16', 'exe': 'Execute', 'CSRF_TOKEN':", "writing, 
software # distributed under the License is distributed on", "UI) -------------------------------------------------- This class can contains common functions which are", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms() def setforms(self): if \"FW860\" in self.ver(): self.hrdwr", "in self.ver(): self.hrdwr = 'p8' self.frms = {'pwr': '59', 'dbg':", "# # OpenPOWER Automated Test Project # # Contributors Listed", "myurl.read().decode(\"utf-8\") def submit(self, form, param): param['CSRF_TOKEN'] = self.getcsrf(form) data =", "return True param = {'user': self.user_name, 'password': <PASSWORD>, 'login': 'Log", "trying a request. if \"Too many users\" in self.getpage(\"form=2\"): raise", "'30', 'clear': \"Clear all error/event log entries\", 'CSRF_TOKEN': ''} form", "License for the specific language governing # permissions and limitations", "= \"form=%s\" % self.frms['pwr'] return self.getpage(form) def start_debugvtty_session(self, partitionId='0', sessionId='0',", "# IBM_PROLOG_END_TAG ''' OpTestASM: Advanced System Management (FSP Web UI)", "users' logged in, # even though it actually doesn't. let's", "else: return '0' def getpage(self, form): myurl = urllib.request.urlopen(self.url+form, timeout=60)", "# permissions and limitations under the License. # # IBM_PROLOG_END_TAG", "'p8' self.frms = {'pwr': '59', 'dbg': '78', 'immpwroff': '32'} else:", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "count = 0 while count < 2: if not len(self.cj)", "'Log out', 'CSRF_TOKEN': ''} form = \"form=1\" self.submit(form, param) def", "# [+] International Business Machines Corp. 
# # # Licensed", "= \"form=1\" return self.getpage(form) def execommand(self, cmd): if not self.login():", "self.submit(form, param) msg = \"Login failed with user:{0} and password:{1}\".format(", "to login ASM page\") param = {'form': '30', 'clear': \"Clear", "has 'too many users' logged in, # even though it", "System Management (FSP Web UI) -------------------------------------------------- This class can contains", "'too many users' logged in, # even though it actually", "self.execommand('iptables -F') self.logout() def clearlogs(self): if not self.login(): raise OpTestError(\"Failed", "return myurl.read().decode(\"utf-8\") def submit(self, form, param): param['CSRF_TOKEN'] = self.getcsrf(form) data", "OpenPOWER Automated Test Project # # Contributors Listed Below -", "specific language governing # permissions and limitations under the License.", "import sys import subprocess from .OpTestConstants import OpTestConstants as BMC_CONST", "'submit': 'Save settings', 'CSRF_TOKEN': ''} form = \"form=56\" self.submit(form, param)", "def logout(self): param = {'submit': 'Log out', 'CSRF_TOKEN': ''} form", "param['CSRF_TOKEN'] = self.getcsrf(form) data = urllib.parse.urlencode(param).encode(\"utf-8\") req = urllib.request.Request(self.url+form, data)", "= {'pwr': '60', 'dbg': '79', 'immpwroff': '33'} def getcsrf(self, form):", "entries\", 'CSRF_TOKEN': ''} form = \"form=30\" self.submit(form, param) self.logout() def", "= \"form=16&frm=0\" self.submit(form, param) def disablefirewall(self): if not self.login(): raise", "ssl class OpTestASM: def __init__(self, i_fspIP, i_fspUser, i_fspPasswd): self.host_name =", "''' import time import subprocess import os import pexpect import", "'dbg': '79', 'immpwroff': '33'} def getcsrf(self, form): while True: try:", "def setforms(self): if \"FW860\" in self.ver(): self.hrdwr = 'p8' self.frms", "import http.cookiejar import urllib.request import urllib.parse import urllib.error import re", "class can contains common functions which 
are useful for FSP", "the License is distributed on an \"AS IS\" BASIS, #", "See the License for the specific language governing # permissions", "so we scrape it. ''' import time import subprocess import", "urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders = [('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms() def setforms(self):", "self.url = \"https://%s/cgi-bin/cgi?\" % self.host_name self.cj = http.cookiejar.CookieJar() context =", "len(self.cj) == 0: break # the login can quietly fail", "1 if count == 2: print(msg) return False return True", "+= 1 if count == 2: print(msg) return False return", "self.password) print(msg) count += 1 if count == 2: print(msg)", "codes), so we scrape it. ''' import time import subprocess", "= <PASSWORD> self.url = \"https://%s/cgi-bin/cgi?\" % self.host_name self.cj = http.cookiejar.CookieJar()", "self.getpage(\"form=2\"): raise OpTestError(\"FSP reports 'Too many users', FSP needs power", "the FSP has 'too many users' logged in, # even", "{'form': '81', 'p': partitionId, 's': sessionId, 't': sessionTimeout, 'Save settings':", "= {'form': '16', 'exe': 'Execute', 'CSRF_TOKEN': '', 'cmd': cmd} form", "i_fspUser self.password = <PASSWORD> self.url = \"https://%s/cgi-bin/cgi?\" % self.host_name self.cj", "opener.addheaders = [('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms() def setforms(self): if", "'cmd': cmd} form = \"form=16&frm=0\" self.submit(form, param) def disablefirewall(self): if", "law or agreed to in writing, software # distributed under", "settings', 'CSRF_TOKEN': ''} form = \"form=81\" self.submit(form, param) self.logout() def", "\"form=16&frm=0\" self.submit(form, param) def disablefirewall(self): if not self.login(): raise OpTestError(\"Failed", "we scrape it. 
''' import time import subprocess import os", "True: try: myurl = urllib.request.urlopen(self.url+form, timeout=10) except urllib.error.URLError: time.sleep(2) continue", "self.frms = {'pwr': '59', 'dbg': '78', 'immpwroff': '32'} else: self.hrdwr", "sessionTimeout, 'Save settings': 'Save settings', 'CSRF_TOKEN': ''} form = \"form=81\"", "True param = {'user': self.user_name, 'password': <PASSWORD>, 'login': 'Log in',", "which are useful for FSP ASM Web page. Some functionality", "if \"FW860\" in self.ver(): self.hrdwr = 'p8' self.frms = {'pwr':", "powerstat(self): form = \"form=%s\" % self.frms['pwr'] return self.getpage(form) def start_debugvtty_session(self,", "OpTestError(\"Failed to login ASM page\") param = {'form': '56', 'p':", "it. ''' import time import subprocess import os import pexpect", "time.sleep(2) continue break out = myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN' in out:", "= urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders = [('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms() def", "even though it actually doesn't. let's check to see if", "param) msg = \"Login failed with user:{0} and password:{1}\".format( self.user_name,", "accessible through the FSP Web UI (such as progress codes),", "# by trying a request. 
if \"Too many users\" in", "cycle\") time.sleep(10) self.submit(form, param) msg = \"Login failed with user:{0}", "'t': sessionTimeout, 'Save settings': 'Save settings', 'CSRF_TOKEN': ''} form =", "= 'p7' self.frms = {'pwr': '60', 'dbg': '79', 'immpwroff': '33'}", "param): param['CSRF_TOKEN'] = self.getcsrf(form) data = urllib.parse.urlencode(param).encode(\"utf-8\") req = urllib.request.Request(self.url+form,", "quietly fail because the FSP has 'too many users' logged", "may obtain a copy of the License at # #", "raise OpTestError(\"Failed to login ASM page\") param = {'form': '81',", "if 'CSRF_TOKEN' in out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else: return '0'", "'password': <PASSWORD>, 'login': 'Log in', 'lang': '0', 'CSRF_TOKEN': ''} form", "useful for FSP ASM Web page. Some functionality is only", "Test Project # # Contributors Listed Below - COPYRIGHT 2017", "self.submit(form, param) count = 0 while count < 2: if", "break # the login can quietly fail because the FSP", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "automatically generated prolog. 
# # $Source: op-test-framework/common/OpTestASM.py $ # #", "self.cj = http.cookiejar.CookieJar() context = ssl.create_default_context() context.check_hostname = False context.verify_mode", "import urllib.parse import urllib.error import re import ssl class OpTestASM:", "time import subprocess import os import pexpect import sys import", "= \"form=81\" self.submit(form, param) self.logout() def enable_err_injct_policy(self): if not self.login():", "param = {'form': '56', 'p': '1', 'submit': 'Save settings', 'CSRF_TOKEN':", "getcsrf(self, form): while True: try: myurl = urllib.request.urlopen(self.url+form, timeout=10) except", "subprocess from .OpTestConstants import OpTestConstants as BMC_CONST from .OpTestError import", "urllib.request.urlopen(req) def login(self): if not len(self.cj) == 0: return True", "may not use this file except in compliance with the", "'CSRF_TOKEN': ''} form = \"form=81\" self.submit(form, param) self.logout() def enable_err_injct_policy(self):", "page\") param = {'form': '30', 'clear': \"Clear all error/event log", "'Save settings', 'CSRF_TOKEN': ''} form = \"form=56\" self.submit(form, param) self.logout()", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= 0 while count < 2: if not len(self.cj) ==", "= [('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms() def setforms(self): if \"FW860\"", "this file except in compliance with the License. 
# You", "urllib.parse import urllib.error import re import ssl class OpTestASM: def", "[('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms() def setforms(self): if \"FW860\" in", "self.getcsrf(form) data = urllib.parse.urlencode(param).encode(\"utf-8\") req = urllib.request.Request(self.url+form, data) return urllib.request.urlopen(req)", "<PASSWORD>, 'login': 'Log in', 'lang': '0', 'CSRF_TOKEN': ''} form =", "= 'p8' self.frms = {'pwr': '59', 'dbg': '78', 'immpwroff': '32'}", "import re import ssl class OpTestASM: def __init__(self, i_fspIP, i_fspUser,", "i_fspPasswd): self.host_name = i_fspIP self.user_name = i_fspUser self.password = <PASSWORD>", "python3 # encoding=utf8 # IBM_PROLOG_BEGIN_TAG # This is an automatically", "UI (such as progress codes), so we scrape it. '''", "Contributors Listed Below - COPYRIGHT 2017 # [+] International Business", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "'79', 'immpwroff': '33'} def getcsrf(self, form): while True: try: myurl", "ssl.create_default_context() context.check_hostname = False context.verify_mode = ssl.CERT_NONE opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context))", "# # Licensed under the Apache License, Version 2.0 (the", "re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else: return '0' def getpage(self, form): myurl =", "page. Some functionality is only accessible through the FSP Web", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "data = urllib.parse.urlencode(param).encode(\"utf-8\") req = urllib.request.Request(self.url+form, data) return urllib.request.urlopen(req) def", "ASM page\") param = {'form': '56', 'p': '1', 'submit': 'Save", "= urllib.request.urlopen(self.url+form, timeout=10) except urllib.error.URLError: time.sleep(2) continue break out =", "OpTestError(\"Failed to login ASM page\") param = {'form': '30', 'clear':", "= self.getcsrf(form) data = urllib.parse.urlencode(param).encode(\"utf-8\") req = urllib.request.Request(self.url+form, data) return", "= ssl.CERT_NONE opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders = [('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj))", "'p7' self.frms = {'pwr': '60', 'dbg': '79', 'immpwroff': '33'} def", "while True: try: myurl = urllib.request.urlopen(self.url+form, timeout=10) except urllib.error.URLError: time.sleep(2)", "# This is an automatically generated prolog. # # $Source:", "class OpTestASM: def __init__(self, i_fspIP, i_fspUser, i_fspPasswd): self.host_name = i_fspIP", "see if this is the case # by trying a", "power cycle\") time.sleep(10) self.submit(form, param) msg = \"Login failed with", "FSP has 'too many users' logged in, # even though", "self.submit(form, param) self.logout() def powerstat(self): form = \"form=%s\" % self.frms['pwr']", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "def execommand(self, cmd): if not self.login(): raise OpTestError(\"Failed to login", "prolog. 
# # $Source: op-test-framework/common/OpTestASM.py $ # # OpenPOWER Automated", "subprocess import os import pexpect import sys import subprocess from", "logout(self): param = {'submit': 'Log out', 'CSRF_TOKEN': ''} form =", "form = \"form=30\" self.submit(form, param) self.logout() def powerstat(self): form =", "param) def disablefirewall(self): if not self.login(): raise OpTestError(\"Failed to login", "# # # Licensed under the Apache License, Version 2.0", "ASM page\") param = {'form': '30', 'clear': \"Clear all error/event", "def login(self): if not len(self.cj) == 0: return True param", "the login can quietly fail because the FSP has 'too", "$ # # OpenPOWER Automated Test Project # # Contributors", "OpTestASM: Advanced System Management (FSP Web UI) -------------------------------------------------- This class", "self.user_name, self.password) print(msg) count += 1 if count == 2:", "in', 'lang': '0', 'CSRF_TOKEN': ''} form = \"form=2\" resp =", "cmd} form = \"form=16&frm=0\" self.submit(form, param) def disablefirewall(self): if not", "in self.getpage(\"form=2\"): raise OpTestError(\"FSP reports 'Too many users', FSP needs", "self.frms['pwr'] return self.getpage(form) def start_debugvtty_session(self, partitionId='0', sessionId='0', sessionTimeout='600'): if not", "if this is the case # by trying a request.", "op-test-framework/common/OpTestASM.py $ # # OpenPOWER Automated Test Project # #", "= i_fspUser self.password = <PASSWORD> self.url = \"https://%s/cgi-bin/cgi?\" % self.host_name", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "cmd): if not self.login(): raise OpTestError(\"Failed to login ASM page\")", "{'user': self.user_name, 'password': <PASSWORD>, 'login': 'Log in', 'lang': '0', 'CSRF_TOKEN':", "= self.submit(form, param) count = 0 while count < 2:", "form): myurl = urllib.request.urlopen(self.url+form, timeout=60) return myurl.read().decode(\"utf-8\") def submit(self, form,", "{'pwr': '59', 'dbg': 
'78', 'immpwroff': '32'} else: self.hrdwr = 'p7'", "# the login can quietly fail because the FSP has", "under the License. # # IBM_PROLOG_END_TAG ''' OpTestASM: Advanced System", "# even though it actually doesn't. let's check to see", "'clear': \"Clear all error/event log entries\", 'CSRF_TOKEN': ''} form =", "to login ASM page\") self.execommand('iptables -F') self.logout() def clearlogs(self): if", "myurl = urllib.request.urlopen(self.url+form, timeout=60) return myurl.read().decode(\"utf-8\") def submit(self, form, param):", "Listed Below - COPYRIGHT 2017 # [+] International Business Machines", "if not self.login(): raise OpTestError(\"Failed to login ASM page\") self.execommand('iptables", "self.login(): raise OpTestError(\"Failed to login ASM page\") param = {'form':", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "KIND, either express or # implied. See the License for", "settings': 'Save settings', 'CSRF_TOKEN': ''} form = \"form=81\" self.submit(form, param)", "the FSP Web UI (such as progress codes), so we", "import subprocess import os import pexpect import sys import subprocess", "continue break out = myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN' in out: return", "reports 'Too many users', FSP needs power cycle\") time.sleep(10) self.submit(form,", "context = ssl.create_default_context() context.check_hostname = False context.verify_mode = ssl.CERT_NONE opener", "not len(self.cj) == 0: break # the login can quietly", "if count == 2: print(msg) return False return True def", "return True def logout(self): param = {'submit': 'Log out', 'CSRF_TOKEN':", "ver(self): form = \"form=1\" return self.getpage(form) def execommand(self, cmd): if", "$Source: op-test-framework/common/OpTestASM.py $ # # OpenPOWER Automated Test Project #", "log entries\", 'CSRF_TOKEN': ''} form = \"form=30\" self.submit(form, param) self.logout()", "(the \"License\"); # you may not use this file except", "print(msg) return False return True def 
logout(self): param = {'submit':", "if not self.login(): raise OpTestError(\"Failed to login ASM page\") param", "urllib.request.urlopen(self.url+form, timeout=60) return myurl.read().decode(\"utf-8\") def submit(self, form, param): param['CSRF_TOKEN'] =", "'56', 'p': '1', 'submit': 'Save settings', 'CSRF_TOKEN': ''} form =", "# you may not use this file except in compliance", "= \"https://%s/cgi-bin/cgi?\" % self.host_name self.cj = http.cookiejar.CookieJar() context = ssl.create_default_context()", "pexpect import sys import subprocess from .OpTestConstants import OpTestConstants as", "FSP Web UI (such as progress codes), so we scrape", "sessionId='0', sessionTimeout='600'): if not self.login(): raise OpTestError(\"Failed to login ASM", "common functions which are useful for FSP ASM Web page.", "return self.getpage(form) def start_debugvtty_session(self, partitionId='0', sessionId='0', sessionTimeout='600'): if not self.login():", "if not len(self.cj) == 0: return True param = {'user':", "form, param): param['CSRF_TOKEN'] = self.getcsrf(form) data = urllib.parse.urlencode(param).encode(\"utf-8\") req =", "re import ssl class OpTestASM: def __init__(self, i_fspIP, i_fspUser, i_fspPasswd):", "= urllib.request.urlopen(self.url+form, timeout=60) return myurl.read().decode(\"utf-8\") def submit(self, form, param): param['CSRF_TOKEN']", "contains common functions which are useful for FSP ASM Web", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or #", "# # Unless required by applicable law or agreed to", "sys import subprocess from .OpTestConstants import OpTestConstants as BMC_CONST from", "to login ASM page\") param = {'form': '16', 'exe': 'Execute',", "login ASM page\") param = {'form': '30', 'clear': \"Clear all", "\"https://%s/cgi-bin/cgi?\" % self.host_name self.cj = http.cookiejar.CookieJar() context = ssl.create_default_context() context.check_hostname", "0: return True param = {'user': self.user_name, 'password': <PASSWORD>, 'login':", "Machines Corp. 
# # # Licensed under the Apache License,", "urllib.request.install_opener(opener) self.setforms() def setforms(self): if \"FW860\" in self.ver(): self.hrdwr =", "doesn't. let's check to see if this is the case", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "ASM page\") self.execommand('iptables -F') self.logout() def clearlogs(self): if not self.login():", "i_fspIP, i_fspUser, i_fspPasswd): self.host_name = i_fspIP self.user_name = i_fspUser self.password", "def clearlogs(self): if not self.login(): raise OpTestError(\"Failed to login ASM", "= {'form': '56', 'p': '1', 'submit': 'Save settings', 'CSRF_TOKEN': ''}", "[+] International Business Machines Corp. # # # Licensed under", "Version 2.0 (the \"License\"); # you may not use this", "through the FSP Web UI (such as progress codes), so", "import pexpect import sys import subprocess from .OpTestConstants import OpTestConstants", "for FSP ASM Web page. Some functionality is only accessible", "all error/event log entries\", 'CSRF_TOKEN': ''} form = \"form=30\" self.submit(form,", "len(self.cj) == 0: return True param = {'user': self.user_name, 'password':", "case # by trying a request. if \"Too many users\"", "timeout=10) except urllib.error.URLError: time.sleep(2) continue break out = myurl.read().decode(\"utf-8\") if", "\"form=2\" resp = self.submit(form, param) count = 0 while count", "under the Apache License, Version 2.0 (the \"License\"); # you", "return '0' def getpage(self, form): myurl = urllib.request.urlopen(self.url+form, timeout=60) return", "either express or # implied. See the License for the", "False return True def logout(self): param = {'submit': 'Log out',", "self.user_name = i_fspUser self.password = <PASSWORD> self.url = \"https://%s/cgi-bin/cgi?\" %", "Automated Test Project # # Contributors Listed Below - COPYRIGHT", "the case # by trying a request. 
if \"Too many", "\"form=81\" self.submit(form, param) self.logout() def enable_err_injct_policy(self): if not self.login(): raise", "self.submit(form, param) def ver(self): form = \"form=1\" return self.getpage(form) def", "page\") param = {'form': '81', 'p': partitionId, 's': sessionId, 't':", "in out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else: return '0' def getpage(self,", "urllib.request.urlopen(self.url+form, timeout=10) except urllib.error.URLError: time.sleep(2) continue break out = myurl.read().decode(\"utf-8\")", "by applicable law or agreed to in writing, software #", "to login ASM page\") param = {'form': '56', 'p': '1',", "= urllib.parse.urlencode(param).encode(\"utf-8\") req = urllib.request.Request(self.url+form, data) return urllib.request.urlopen(req) def login(self):", "return urllib.request.urlopen(req) def login(self): if not len(self.cj) == 0: return", "FSP ASM Web page. Some functionality is only accessible through", "'81', 'p': partitionId, 's': sessionId, 't': sessionTimeout, 'Save settings': 'Save", "''' OpTestASM: Advanced System Management (FSP Web UI) -------------------------------------------------- This", "'60', 'dbg': '79', 'immpwroff': '33'} def getcsrf(self, form): while True:", "OR CONDITIONS OF ANY KIND, either express or # implied.", "i_fspUser, i_fspPasswd): self.host_name = i_fspIP self.user_name = i_fspUser self.password =", "is only accessible through the FSP Web UI (such as", "sessionId, 't': sessionTimeout, 'Save settings': 'Save settings', 'CSRF_TOKEN': ''} form", "= False context.verify_mode = ssl.CERT_NONE opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders =", "execommand(self, cmd): if not self.login(): raise OpTestError(\"Failed to login ASM", "start_debugvtty_session(self, partitionId='0', sessionId='0', sessionTimeout='600'): if not self.login(): raise OpTestError(\"Failed to", "setforms(self): if \"FW860\" in self.ver(): self.hrdwr = 'p8' 
self.frms =", "the License for the specific language governing # permissions and", "raise OpTestError(\"Failed to login ASM page\") param = {'form': '30',", "import urllib.error import re import ssl class OpTestASM: def __init__(self,", "are useful for FSP ASM Web page. Some functionality is", "import subprocess from .OpTestConstants import OpTestConstants as BMC_CONST from .OpTestError", "not self.login(): raise OpTestError(\"Failed to login ASM page\") self.execommand('iptables -F')", "(such as progress codes), so we scrape it. ''' import", "= {'pwr': '59', 'dbg': '78', 'immpwroff': '32'} else: self.hrdwr =", "'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener) self.setforms() def setforms(self): if \"FW860\" in self.ver():", "'exe': 'Execute', 'CSRF_TOKEN': '', 'cmd': cmd} form = \"form=16&frm=0\" self.submit(form,", "# OpenPOWER Automated Test Project # # Contributors Listed Below", "is an automatically generated prolog. # # $Source: op-test-framework/common/OpTestASM.py $", "count == 2: print(msg) return False return True def logout(self):", "ssl.CERT_NONE opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders = [('User-agent', 'LTCTest')] opener.add_handler(urllib.request.HTTPCookieProcessor(self.cj)) urllib.request.install_opener(opener)", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "implied. See the License for the specific language governing #", "def getpage(self, form): myurl = urllib.request.urlopen(self.url+form, timeout=60) return myurl.read().decode(\"utf-8\") def", "2017 # [+] International Business Machines Corp. 
# # #", "Unless required by applicable law or agreed to in writing,", "self.user_name, 'password': <PASSWORD>, 'login': 'Log in', 'lang': '0', 'CSRF_TOKEN': ''}", "self.logout() def clearlogs(self): if not self.login(): raise OpTestError(\"Failed to login", "from .OpTestError import OpTestError import http.cookiejar import urllib.request import urllib.parse", "'Save settings': 'Save settings', 'CSRF_TOKEN': ''} form = \"form=81\" self.submit(form,", "timeout=60) return myurl.read().decode(\"utf-8\") def submit(self, form, param): param['CSRF_TOKEN'] = self.getcsrf(form)", "generated prolog. # # $Source: op-test-framework/common/OpTestASM.py $ # # OpenPOWER", "{'form': '16', 'exe': 'Execute', 'CSRF_TOKEN': '', 'cmd': cmd} form =", "import OpTestConstants as BMC_CONST from .OpTestError import OpTestError import http.cookiejar", "enable_err_injct_policy(self): if not self.login(): raise OpTestError(\"Failed to login ASM page\")", "'Too many users', FSP needs power cycle\") time.sleep(10) self.submit(form, param)", "applicable law or agreed to in writing, software # distributed", "import OpTestError import http.cookiejar import urllib.request import urllib.parse import urllib.error", "login can quietly fail because the FSP has 'too many", "\"FW860\" in self.ver(): self.hrdwr = 'p8' self.frms = {'pwr': '59',", "Business Machines Corp. # # # Licensed under the Apache", "users\" in self.getpage(\"form=2\"): raise OpTestError(\"FSP reports 'Too many users', FSP", "the specific language governing # permissions and limitations under the", "'immpwroff': '32'} else: self.hrdwr = 'p7' self.frms = {'pwr': '60',", "- COPYRIGHT 2017 # [+] International Business Machines Corp. #", "raise OpTestError(\"FSP reports 'Too many users', FSP needs power cycle\")", "= {'form': '30', 'clear': \"Clear all error/event log entries\", 'CSRF_TOKEN':", "'59', 'dbg': '78', 'immpwroff': '32'} else: self.hrdwr = 'p7' self.frms", "progress codes), so we scrape it. 
''' import time import", "in writing, software # distributed under the License is distributed", "{'form': '30', 'clear': \"Clear all error/event log entries\", 'CSRF_TOKEN': ''}", "Some functionality is only accessible through the FSP Web UI", "param) def ver(self): form = \"form=1\" return self.getpage(form) def execommand(self,", "needs power cycle\") time.sleep(10) self.submit(form, param) msg = \"Login failed", "for the specific language governing # permissions and limitations under", "= {'form': '81', 'p': partitionId, 's': sessionId, 't': sessionTimeout, 'Save", "param) self.logout() def enable_err_injct_policy(self): if not self.login(): raise OpTestError(\"Failed to", "return False return True def logout(self): param = {'submit': 'Log", "def submit(self, form, param): param['CSRF_TOKEN'] = self.getcsrf(form) data = urllib.parse.urlencode(param).encode(\"utf-8\")", "submit(self, form, param): param['CSRF_TOKEN'] = self.getcsrf(form) data = urllib.parse.urlencode(param).encode(\"utf-8\") req", "Web UI) -------------------------------------------------- This class can contains common functions which", "OpTestConstants as BMC_CONST from .OpTestError import OpTestError import http.cookiejar import", "i_fspIP self.user_name = i_fspUser self.password = <PASSWORD> self.url = \"https://%s/cgi-bin/cgi?\"", "login(self): if not len(self.cj) == 0: return True param =", "# IBM_PROLOG_BEGIN_TAG # This is an automatically generated prolog. #", "form = \"form=1\" self.submit(form, param) def ver(self): form = \"form=1\"", "ANY KIND, either express or # implied. 
See the License", "(FSP Web UI) -------------------------------------------------- This class can contains common functions", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "'Save settings', 'CSRF_TOKEN': ''} form = \"form=81\" self.submit(form, param) self.logout()", "# You may obtain a copy of the License at", "param = {'submit': 'Log out', 'CSRF_TOKEN': ''} form = \"form=1\"", "ASM page\") param = {'form': '81', 'p': partitionId, 's': sessionId,", "Management (FSP Web UI) -------------------------------------------------- This class can contains common", "print(msg) count += 1 if count == 2: print(msg) return", "#!/usr/bin/env python3 # encoding=utf8 # IBM_PROLOG_BEGIN_TAG # This is an", "= \"form=30\" self.submit(form, param) self.logout() def powerstat(self): form = \"form=%s\"", "= http.cookiejar.CookieJar() context = ssl.create_default_context() context.check_hostname = False context.verify_mode =", "'login': 'Log in', 'lang': '0', 'CSRF_TOKEN': ''} form = \"form=2\"", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "OpTestError(\"FSP reports 'Too many users', FSP needs power cycle\") time.sleep(10)", "many users', FSP needs power cycle\") time.sleep(10) self.submit(form, param) msg", ".OpTestConstants import OpTestConstants as BMC_CONST from .OpTestError import OpTestError import", "return self.getpage(form) def execommand(self, cmd): if not self.login(): raise OpTestError(\"Failed", "\"Login failed with user:{0} and password:{1}\".format( self.user_name, self.password) print(msg) count", "http.cookiejar.CookieJar() context = ssl.create_default_context() context.check_hostname = False context.verify_mode = ssl.CERT_NONE", "disablefirewall(self): if not self.login(): raise OpTestError(\"Failed to login ASM page\")", "though it actually doesn't. 
let's check to see if this", "''} form = \"form=30\" self.submit(form, param) self.logout() def powerstat(self): form", "'p': partitionId, 's': sessionId, 't': sessionTimeout, 'Save settings': 'Save settings',", "= \"form=1\" self.submit(form, param) def ver(self): form = \"form=1\" return", "Apache License, Version 2.0 (the \"License\"); # you may not", "'CSRF_TOKEN': ''} form = \"form=2\" resp = self.submit(form, param) count", "= ssl.create_default_context() context.check_hostname = False context.verify_mode = ssl.CERT_NONE opener =", "''} form = \"form=2\" resp = self.submit(form, param) count =", "# # $Source: op-test-framework/common/OpTestASM.py $ # # OpenPOWER Automated Test", "context.verify_mode = ssl.CERT_NONE opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders = [('User-agent', 'LTCTest')]", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "form = \"form=1\" return self.getpage(form) def execommand(self, cmd): if not", "sessionTimeout='600'): if not self.login(): raise OpTestError(\"Failed to login ASM page\")", "{'pwr': '60', 'dbg': '79', 'immpwroff': '33'} def getcsrf(self, form): while", "param = {'form': '30', 'clear': \"Clear all error/event log entries\",", "'s': sessionId, 't': sessionTimeout, 'Save settings': 'Save settings', 'CSRF_TOKEN': ''}", "Web page. Some functionality is only accessible through the FSP", "governing # permissions and limitations under the License. 
# #", "'dbg': '78', 'immpwroff': '32'} else: self.hrdwr = 'p7' self.frms =", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "'lang': '0', 'CSRF_TOKEN': ''} form = \"form=2\" resp = self.submit(form,", "OpTestError(\"Failed to login ASM page\") param = {'form': '81', 'p':", "return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else: return '0' def getpage(self, form): myurl", "while count < 2: if not len(self.cj) == 0: break", "def disablefirewall(self): if not self.login(): raise OpTestError(\"Failed to login ASM", "'0', 'CSRF_TOKEN': ''} form = \"form=2\" resp = self.submit(form, param)", "logged in, # even though it actually doesn't. let's check", "param) self.logout() def powerstat(self): form = \"form=%s\" % self.frms['pwr'] return", "login ASM page\") param = {'form': '56', 'p': '1', 'submit':", "else: self.hrdwr = 'p7' self.frms = {'pwr': '60', 'dbg': '79',", "self.submit(form, param) def disablefirewall(self): if not self.login(): raise OpTestError(\"Failed to", "functionality is only accessible through the FSP Web UI (such", "login ASM page\") self.execommand('iptables -F') self.logout() def clearlogs(self): if not", "Below - COPYRIGHT 2017 # [+] International Business Machines Corp.", "= \"form=2\" resp = self.submit(form, param) count = 0 while", "resp = self.submit(form, param) count = 0 while count <", "{'form': '56', 'p': '1', 'submit': 'Save settings', 'CSRF_TOKEN': ''} form", "the License. 
# # IBM_PROLOG_END_TAG ''' OpTestASM: Advanced System Management", "urllib.parse.urlencode(param).encode(\"utf-8\") req = urllib.request.Request(self.url+form, data) return urllib.request.urlopen(req) def login(self): if", "\"License\"); # you may not use this file except in", "self.password = <PASSWORD> self.url = \"https://%s/cgi-bin/cgi?\" % self.host_name self.cj =", "{'submit': 'Log out', 'CSRF_TOKEN': ''} form = \"form=1\" self.submit(form, param)", "data) return urllib.request.urlopen(req) def login(self): if not len(self.cj) == 0:", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "2: if not len(self.cj) == 0: break # the login", "'CSRF_TOKEN' in out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else: return '0' def", "# # Contributors Listed Below - COPYRIGHT 2017 # [+]", "by trying a request. if \"Too many users\" in self.getpage(\"form=2\"):", "OpTestASM: def __init__(self, i_fspIP, i_fspUser, i_fspPasswd): self.host_name = i_fspIP self.user_name", "self.hrdwr = 'p8' self.frms = {'pwr': '59', 'dbg': '78', 'immpwroff':", "# distributed under the License is distributed on an \"AS", "Advanced System Management (FSP Web UI) -------------------------------------------------- This class can", "# Unless required by applicable law or agreed to in", "= {'submit': 'Log out', 'CSRF_TOKEN': ''} form = \"form=1\" self.submit(form,", "# encoding=utf8 # IBM_PROLOG_BEGIN_TAG # This is an automatically generated", "== 0: break # the login can quietly fail because", "form = \"form=16&frm=0\" self.submit(form, param) def disablefirewall(self): if not self.login():", "Web UI (such as progress codes), so we scrape it.", "% self.frms['pwr'] return self.getpage(form) def start_debugvtty_session(self, partitionId='0', sessionId='0', sessionTimeout='600'): if", "# $Source: op-test-framework/common/OpTestASM.py $ # # OpenPOWER Automated Test Project", "# implied. 
See the License for the specific language governing", "failed with user:{0} and password:{1}\".format( self.user_name, self.password) print(msg) count +=", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "language governing # permissions and limitations under the License. #", "a request. if \"Too many users\" in self.getpage(\"form=2\"): raise OpTestError(\"FSP", "OpTestError(\"Failed to login ASM page\") self.execommand('iptables -F') self.logout() def clearlogs(self):", "error/event log entries\", 'CSRF_TOKEN': ''} form = \"form=30\" self.submit(form, param)", "< 2: if not len(self.cj) == 0: break # the", "'1', 'submit': 'Save settings', 'CSRF_TOKEN': ''} form = \"form=56\" self.submit(form,", "\"form=%s\" % self.frms['pwr'] return self.getpage(form) def start_debugvtty_session(self, partitionId='0', sessionId='0', sessionTimeout='600'):", "password:{1}\".format( self.user_name, self.password) print(msg) count += 1 if count ==", "os import pexpect import sys import subprocess from .OpTestConstants import", "You may obtain a copy of the License at #", "import os import pexpect import sys import subprocess from .OpTestConstants", "def powerstat(self): form = \"form=%s\" % self.frms['pwr'] return self.getpage(form) def", "= myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN' in out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'', out)[0] else:", "as progress codes), so we scrape it. ''' import time", "and password:{1}\".format( self.user_name, self.password) print(msg) count += 1 if count", "urllib.request.Request(self.url+form, data) return urllib.request.urlopen(req) def login(self): if not len(self.cj) ==", "or # implied. See the License for the specific language", "COPYRIGHT 2017 # [+] International Business Machines Corp. 
# #", "break out = myurl.read().decode(\"utf-8\") if 'CSRF_TOKEN' in out: return re.findall('CSRF_TOKEN.*value=\\'(.*)\\'',", "the Apache License, Version 2.0 (the \"License\"); # you may", "__init__(self, i_fspIP, i_fspUser, i_fspPasswd): self.host_name = i_fspIP self.user_name = i_fspUser", "'CSRF_TOKEN': ''} form = \"form=1\" self.submit(form, param) def ver(self): form", "False context.verify_mode = ssl.CERT_NONE opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=context)) opener.addheaders = [('User-agent'," ]
[ "import storage from modules.service import Service from modules.database import Database", "self.database.add_service(Service()) self.database.name = \"Hey\" storage.write(\"test\", self.database, \"test.db\") database2 = Database()", "__author__ = \"<NAME>\" import sys import os sys.path.append(\"..\") import unittest", "Service() storage.read(\"test\", service2, \"test.service\") self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password, self.service.password)", "\"This\" self.service.password = \"<PASSWORD>\" storage.write(\"test\", self.service, \"test.service\") service2 = Service()", "self.database = Database() open(\"test.service\", \"w+\").close() open(\"test.db\", \"w+\").close() def test_write_read_service(self): self.service.service_name", "Database() storage.read(\"test\", database2, \"test.db\") self.assertEqual(database2.name, self.database.name) for i in range(len(self.database.services)):", "self.service.password = \"<PASSWORD>\" storage.write(\"test\", self.service, \"test.service\") service2 = Service() storage.read(\"test\",", "self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password, self.service.password) def test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service())", "import sys import os sys.path.append(\"..\") import unittest from modules import", "modules.database import Database class TestStorage(unittest.TestCase): def setUp(self): self.service = Service()", "self.service, \"test.service\") service2 = Service() storage.read(\"test\", service2, \"test.service\") self.assertEqual(service2.service_name, self.service.service_name)", "\"w+\").close() open(\"test.db\", \"w+\").close() def test_write_read_service(self): self.service.service_name = \"Hello\" self.service.username =", 
"open(\"test.service\", \"w+\").close() open(\"test.db\", \"w+\").close() def test_write_read_service(self): self.service.service_name = \"Hello\" self.service.username", "test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service()) self.database.name = \"Hey\" storage.write(\"test\", self.database, \"test.db\") database2", "self.assertEqual(database2.name, self.database.name) for i in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username)", "self.service.username) self.assertEqual(service2.password, self.service.password) def test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service()) self.database.name = \"Hey\"", "#!/usr/bin/python3.4 __author__ = \"<NAME>\" import sys import os sys.path.append(\"..\") import", "Database class TestStorage(unittest.TestCase): def setUp(self): self.service = Service() self.database =", "from modules.database import Database class TestStorage(unittest.TestCase): def setUp(self): self.service =", "from modules.service import Service from modules.database import Database class TestStorage(unittest.TestCase):", "import Database class TestStorage(unittest.TestCase): def setUp(self): self.service = Service() self.database", "service2 = Service() storage.read(\"test\", service2, \"test.service\") self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username, self.service.username)", "= Database() storage.read(\"test\", database2, \"test.db\") self.assertEqual(database2.name, self.database.name) for i in", "self.assertEqual(service2.password, self.service.password) def test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service()) self.database.name = \"Hey\" storage.write(\"test\",", "= \"Hey\" 
storage.write(\"test\", self.database, \"test.db\") database2 = Database() storage.read(\"test\", database2,", "self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password) def tearDown(self): os.remove(os.getcwd() +", "unittest from modules import storage from modules.service import Service from", "self.service.service_name) self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password, self.service.password) def test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service()) self.database.name", "self.database.name = \"Hey\" storage.write(\"test\", self.database, \"test.db\") database2 = Database() storage.read(\"test\",", "\"<PASSWORD>\" storage.write(\"test\", self.service, \"test.service\") service2 = Service() storage.read(\"test\", service2, \"test.service\")", "self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password) def tearDown(self): os.remove(os.getcwd() + \"/test.service\")", "Service from modules.database import Database class TestStorage(unittest.TestCase): def setUp(self): self.service", "def tearDown(self): os.remove(os.getcwd() + \"/test.service\") os.remove(os.getcwd() + \"/test.db\") if __name__", "= \"<NAME>\" import sys import os sys.path.append(\"..\") import unittest from", "Service() self.database = Database() open(\"test.service\", \"w+\").close() open(\"test.db\", \"w+\").close() def test_write_read_service(self):", "sys.path.append(\"..\") import unittest from modules import storage from modules.service import", "= \"Hello\" self.service.username = \"This\" self.service.password = \"<PASSWORD>\" 
storage.write(\"test\", self.service,", "for i in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password)", "import Service from modules.database import Database class TestStorage(unittest.TestCase): def setUp(self):", "class TestStorage(unittest.TestCase): def setUp(self): self.service = Service() self.database = Database()", "os sys.path.append(\"..\") import unittest from modules import storage from modules.service", "service2, \"test.service\") self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password, self.service.password) def test_write_read_database(self):", "self.assertEqual(database2.services[i].password, self.database.services[i].password) def tearDown(self): os.remove(os.getcwd() + \"/test.service\") os.remove(os.getcwd() + \"/test.db\")", "self.database.services[i].password) def tearDown(self): os.remove(os.getcwd() + \"/test.service\") os.remove(os.getcwd() + \"/test.db\") if", "= \"<PASSWORD>\" storage.write(\"test\", self.service, \"test.service\") service2 = Service() storage.read(\"test\", service2,", "\"test.service\") self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password, self.service.password) def test_write_read_database(self): self.database.add_service(Service())", "\"test.db\") database2 = Database() storage.read(\"test\", database2, \"test.db\") self.assertEqual(database2.name, self.database.name) for", "def test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service()) self.database.name = \"Hey\" storage.write(\"test\", self.database, \"test.db\")", 
"test_write_read_service(self): self.service.service_name = \"Hello\" self.service.username = \"This\" self.service.password = \"<PASSWORD>\"", "modules.service import Service from modules.database import Database class TestStorage(unittest.TestCase): def", "= Database() open(\"test.service\", \"w+\").close() open(\"test.db\", \"w+\").close() def test_write_read_service(self): self.service.service_name =", "self.service.username = \"This\" self.service.password = \"<PASSWORD>\" storage.write(\"test\", self.service, \"test.service\") service2", "self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password, self.service.password) def test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service()) self.database.name =", "in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password) def tearDown(self):", "tearDown(self): os.remove(os.getcwd() + \"/test.service\") os.remove(os.getcwd() + \"/test.db\") if __name__ ==", "import unittest from modules import storage from modules.service import Service", "self.service.password) def test_write_read_database(self): self.database.add_service(Service()) self.database.add_service(Service()) self.database.name = \"Hey\" storage.write(\"test\", self.database,", "storage.read(\"test\", database2, \"test.db\") self.assertEqual(database2.name, self.database.name) for i in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name,", "storage.write(\"test\", self.database, \"test.db\") database2 = Database() storage.read(\"test\", database2, \"test.db\") self.assertEqual(database2.name,", "= Service() self.database = Database() open(\"test.service\", \"w+\").close() open(\"test.db\", 
\"w+\").close() def", "self.database, \"test.db\") database2 = Database() storage.read(\"test\", database2, \"test.db\") self.assertEqual(database2.name, self.database.name)", "\"<NAME>\" import sys import os sys.path.append(\"..\") import unittest from modules", "TestStorage(unittest.TestCase): def setUp(self): self.service = Service() self.database = Database() open(\"test.service\",", "setUp(self): self.service = Service() self.database = Database() open(\"test.service\", \"w+\").close() open(\"test.db\",", "\"test.service\") service2 = Service() storage.read(\"test\", service2, \"test.service\") self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username,", "from modules import storage from modules.service import Service from modules.database", "range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password) def tearDown(self): os.remove(os.getcwd()", "self.service = Service() self.database = Database() open(\"test.service\", \"w+\").close() open(\"test.db\", \"w+\").close()", "i in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password) def", "modules import storage from modules.service import Service from modules.database import", "import os sys.path.append(\"..\") import unittest from modules import storage from", "self.service.service_name = \"Hello\" self.service.username = \"This\" self.service.password = \"<PASSWORD>\" storage.write(\"test\",", "storage.write(\"test\", self.service, \"test.service\") service2 = Service() storage.read(\"test\", 
service2, \"test.service\") self.assertEqual(service2.service_name,", "self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password) def tearDown(self): os.remove(os.getcwd() + \"/test.service\") os.remove(os.getcwd() +", "database2, \"test.db\") self.assertEqual(database2.name, self.database.name) for i in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name)", "= \"This\" self.service.password = \"<PASSWORD>\" storage.write(\"test\", self.service, \"test.service\") service2 =", "def test_write_read_service(self): self.service.service_name = \"Hello\" self.service.username = \"This\" self.service.password =", "def setUp(self): self.service = Service() self.database = Database() open(\"test.service\", \"w+\").close()", "self.database.name) for i in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username, self.database.services[i].username) self.assertEqual(database2.services[i].password,", "+ \"/test.service\") os.remove(os.getcwd() + \"/test.db\") if __name__ == \"__main__\": unittest.main()", "database2 = Database() storage.read(\"test\", database2, \"test.db\") self.assertEqual(database2.name, self.database.name) for i", "= Service() storage.read(\"test\", service2, \"test.service\") self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password,", "self.database.add_service(Service()) self.database.add_service(Service()) self.database.name = \"Hey\" storage.write(\"test\", self.database, \"test.db\") database2 =", "\"Hey\" storage.write(\"test\", self.database, \"test.db\") database2 = Database() storage.read(\"test\", database2, \"test.db\")", "self.assertEqual(database2.services[i].username, 
self.database.services[i].username) self.assertEqual(database2.services[i].password, self.database.services[i].password) def tearDown(self): os.remove(os.getcwd() + \"/test.service\") os.remove(os.getcwd()", "open(\"test.db\", \"w+\").close() def test_write_read_service(self): self.service.service_name = \"Hello\" self.service.username = \"This\"", "\"test.db\") self.assertEqual(database2.name, self.database.name) for i in range(len(self.database.services)): self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name) self.assertEqual(database2.services[i].username,", "sys import os sys.path.append(\"..\") import unittest from modules import storage", "\"w+\").close() def test_write_read_service(self): self.service.service_name = \"Hello\" self.service.username = \"This\" self.service.password", "Database() open(\"test.service\", \"w+\").close() open(\"test.db\", \"w+\").close() def test_write_read_service(self): self.service.service_name = \"Hello\"", "storage from modules.service import Service from modules.database import Database class", "os.remove(os.getcwd() + \"/test.service\") os.remove(os.getcwd() + \"/test.db\") if __name__ == \"__main__\":", "storage.read(\"test\", service2, \"test.service\") self.assertEqual(service2.service_name, self.service.service_name) self.assertEqual(service2.username, self.service.username) self.assertEqual(service2.password, self.service.password) def", "\"Hello\" self.service.username = \"This\" self.service.password = \"<PASSWORD>\" storage.write(\"test\", self.service, \"test.service\")" ]
[ "vars.update(lookup_template_vars) self._templar.set_available_variables(vars) # do the templating res = self._templar.template(template_data, preserve_trailing_newlines=True,", "in # addition to our original search paths. newsearchpath =", "print statement. default: '{{' version_added: '2.8' type: str variable_end_string: description:", "# set jinja2 internal search path for includes searchpath =", "templating results debug: msg: \"{{ lookup('template', './some_template.j2') }}\" - name:", "import to_bytes, to_text from ansible.template import generate_ansible_template_vars from ansible.utils.display import", "'templates' subdir of each search path in # addition to", "contents of file after templating with Jinja2 description: - Returns", "\"0.9\" short_description: retrieve contents of file after templating with Jinja2", "_raw: description: file(s) content after templating \"\"\" import os from", "variable_end_string # The template will have access to all existing", "from ansible.plugins.lookup import LookupBase from ansible.module_utils._text import to_bytes, to_text from", "paths. newsearchpath = [] for p in searchpath: newsearchpath.append(os.path.join(p, 'templates'))", "variable_start_string: description: The string marking the beginning of a print", "to_text(b_template_data, errors='surrogate_or_strict') # set jinja2 internal search path for includes", "description: list of files to template convert_data: type: bool description:", "# Copyright: (c) 2012, <NAME> <<EMAIL>> # Copyright: (c) 2012-17,", "\"\"\" lookup: template author: <NAME> <<EMAIL>> version_added: \"0.9\" short_description: retrieve", "= to_text(b_template_data, errors='surrogate_or_strict') # set jinja2 internal search path for", "# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)", "template_vars= # argument. 
vars = variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars) #", "name: show templating results with different variable start and end", "\"{{ lookup('template', './some_template.j2') }}\" - name: show templating results with", "term in terms: display.debug(\"File lookup term: %s\" % term) lookupfile", "variables, # plus some added by ansible (e.g., template_{path,mtime}), #", "# plus some added by ansible (e.g., template_{path,mtime}), # plus", "import Display display = Display() class LookupModule(LookupBase): def run(self, terms,", "print statement. default: '}}' version_added: '2.8' type: str \"\"\" EXAMPLES", "Display() class LookupModule(LookupBase): def run(self, terms, variables, **kwargs): convert_data_p =", "'2.8' type: str \"\"\" EXAMPLES = \"\"\" - name: show", "msg: \"{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}\" \"\"\" RETURN =", "If False, strings that are YAML will be left untouched.", "start and end string debug: msg: \"{{ lookup('template', './some_template.j2', variable_start_string='[%',", "search path for includes searchpath = variables.get('ansible_search_path', []) if searchpath:", "variables.get('ansible_search_path', []) if searchpath: # our search paths aren't actually", "in, returns a string containing the results of processing that", "file\" % lookupfile) if lookupfile: b_template_data, show_data = self._loader._get_file_contents(lookupfile) template_data", "LookupModule(LookupBase): def run(self, terms, variables, **kwargs): convert_data_p = kwargs.get('convert_data', True)", "vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars) # do the templating res = self._templar.template(template_data,", "os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath = searchpath if variable_start_string is 
not None: self._templar.environment.variable_start_string", "of files to template convert_data: type: bool description: whether to", "Display display = Display() class LookupModule(LookupBase): def run(self, terms, variables,", "{}) ret = [] variable_start_string = kwargs.get('variable_start_string', None) variable_end_string =", "newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p) searchpath = newsearchpath searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath =", "RETURN = \"\"\" _raw: description: file(s) content after templating \"\"\"", "res = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False) ret.append(res) else: raise AnsibleError(\"the", "division, print_function) __metaclass__ = type DOCUMENTATION = \"\"\" lookup: template", "in terms: display.debug(\"File lookup term: %s\" % term) lookupfile =", "searchpath: newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p) searchpath = newsearchpath searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath", "our search paths aren't actually the proper ones for jinja", "Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import", "list of files to template convert_data: type: bool description: whether", "<<EMAIL>> # Copyright: (c) 2012-17, Ansible Project # GNU General", "the list of templates you pass in, returns a string", "if lookupfile: b_template_data, show_data = self._loader._get_file_contents(lookupfile) template_data = to_text(b_template_data, errors='surrogate_or_strict')", "% lookupfile) if lookupfile: b_template_data, show_data = self._loader._get_file_contents(lookupfile) template_data =", "General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__", "after templating with Jinja2 description: - Returns a list of", 
"convert_data: type: bool description: whether to convert YAML into data.", "lookup: template author: <NAME> <<EMAIL>> version_added: \"0.9\" short_description: retrieve contents", "= [] for p in searchpath: newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p) searchpath", "ansible.utils.display import Display display = Display() class LookupModule(LookupBase): def run(self,", "= kwargs.get('convert_data', True) lookup_template_vars = kwargs.get('template_vars', {}) ret = []", "version_added: '2.8' type: str variable_end_string: description: The string marking the", "= self._loader._get_file_contents(lookupfile) template_data = to_text(b_template_data, errors='surrogate_or_strict') # set jinja2 internal", "show templating results with different variable start and end string", "import os from ansible.errors import AnsibleError from ansible.plugins.lookup import LookupBase", "# addition to our original search paths. newsearchpath = []", "debug: msg: \"{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}\" \"\"\" RETURN", "strings; for each template in the list of templates you", "template_data = to_text(b_template_data, errors='surrogate_or_strict') # set jinja2 internal search path", "templates you pass in, returns a string containing the results", "variable_end_string is not None: self._templar.environment.variable_end_string = variable_end_string # The template", "addition to our original search paths. newsearchpath = [] for", "untouched. variable_start_string: description: The string marking the beginning of a", "proper ones for jinja includes. # We want to search", "# do the templating res = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False)", "msg: \"{{ lookup('template', './some_template.j2') }}\" - name: show templating results", "the lookup with the template_vars= # argument. 
vars = variables.copy()", "'./some_template.j2', variable_start_string='[%', variable_end_string='%]') }}\" \"\"\" RETURN = \"\"\" _raw: description:", "lookup('template', './some_template.j2') }}\" - name: show templating results with different", "LookupBase from ansible.module_utils._text import to_bytes, to_text from ansible.template import generate_ansible_template_vars", "processing that template. options: _terms: description: list of files to", "= \"\"\" _raw: description: file(s) content after templating \"\"\" import", "lookup_template_vars = kwargs.get('template_vars', {}) ret = [] variable_start_string = kwargs.get('variable_start_string',", "different variable start and end string debug: msg: \"{{ lookup('template',", "= variable_end_string # The template will have access to all", "run(self, terms, variables, **kwargs): convert_data_p = kwargs.get('convert_data', True) lookup_template_vars =", "COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__", "newsearchpath = [] for p in searchpath: newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p)", "Copyright: (c) 2012-17, Ansible Project # GNU General Public License", "the templating res = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False) ret.append(res) else:", "options: _terms: description: list of files to template convert_data: type:", "generate_ansible_template_vars from ansible.utils.display import Display display = Display() class LookupModule(LookupBase):", "of strings; for each template in the list of templates", "newsearchpath searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath = searchpath if variable_start_string is not", "description: The string marking the end of a print statement.", "data. If False, strings that are YAML will be left", "a print statement. 
default: '}}' version_added: '2.8' type: str \"\"\"", "= \"\"\" lookup: template author: <NAME> <<EMAIL>> version_added: \"0.9\" short_description:", "existing variables, # plus some added by ansible (e.g., template_{path,mtime}),", "'2.8' type: str variable_end_string: description: The string marking the end", "results with different variable start and end string debug: msg:", "paths aren't actually the proper ones for jinja includes. #", "self._templar.environment.loader.searchpath = searchpath if variable_start_string is not None: self._templar.environment.variable_start_string =", "returns a string containing the results of processing that template.", "that are YAML will be left untouched. variable_start_string: description: The", "with different variable start and end string debug: msg: \"{{", "template author: <NAME> <<EMAIL>> version_added: \"0.9\" short_description: retrieve contents of", "= kwargs.get('variable_start_string', None) variable_end_string = kwargs.get('variable_end_string', None) for term in", "Project # GNU General Public License v3.0+ (see COPYING or", "import generate_ansible_template_vars from ansible.utils.display import Display display = Display() class", "lookup using %s as file\" % lookupfile) if lookupfile: b_template_data,", "# our search paths aren't actually the proper ones for", "will have access to all existing variables, # plus some", "file %s could not be found for the lookup\" %", "}}\" - name: show templating results with different variable start", "you pass in, returns a string containing the results of", "%s as file\" % lookupfile) if lookupfile: b_template_data, show_data =", "self._templar.environment.variable_start_string = variable_start_string if variable_end_string is not None: self._templar.environment.variable_end_string =", "description: The string marking the beginning of a print statement.", "variable_end_string='%]') }}\" \"\"\" RETURN = \"\"\" _raw: description: file(s) content", "description: whether to 
convert YAML into data. If False, strings", "self._templar.set_available_variables(vars) # do the templating res = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p,", "<NAME> <<EMAIL>> # Copyright: (c) 2012-17, Ansible Project # GNU", "# argument. vars = variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars) # do", "short_description: retrieve contents of file after templating with Jinja2 description:", "from ansible.errors import AnsibleError from ansible.plugins.lookup import LookupBase from ansible.module_utils._text", "= [] variable_start_string = kwargs.get('variable_start_string', None) variable_end_string = kwargs.get('variable_end_string', None)", "template_{path,mtime}), # plus anything passed to the lookup with the", "type DOCUMENTATION = \"\"\" lookup: template author: <NAME> <<EMAIL>> version_added:", "all existing variables, # plus some added by ansible (e.g.,", "\"\"\" _raw: description: file(s) content after templating \"\"\" import os", "description: file(s) content after templating \"\"\" import os from ansible.errors", "whether to convert YAML into data. If False, strings that", "added by ansible (e.g., template_{path,mtime}), # plus anything passed to", "the template_vars= # argument. vars = variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars)", "marking the end of a print statement. default: '}}' version_added:", "searchpath = variables.get('ansible_search_path', []) if searchpath: # our search paths", "search path in # addition to our original search paths.", "statement. 
default: '}}' version_added: '2.8' type: str \"\"\" EXAMPLES =", "self._loader._get_file_contents(lookupfile) template_data = to_text(b_template_data, errors='surrogate_or_strict') # set jinja2 internal search", "'}}' version_added: '2.8' type: str \"\"\" EXAMPLES = \"\"\" -", "internal search path for includes searchpath = variables.get('ansible_search_path', []) if", "will be left untouched. variable_start_string: description: The string marking the", "pass in, returns a string containing the results of processing", "The string marking the end of a print statement. default:", "'templates')) newsearchpath.append(p) searchpath = newsearchpath searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath = searchpath", "None: self._templar.environment.variable_end_string = variable_end_string # The template will have access", "or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ =", "convert_data=convert_data_p, escape_backslashes=False) ret.append(res) else: raise AnsibleError(\"the template file %s could", "__metaclass__ = type DOCUMENTATION = \"\"\" lookup: template author: <NAME>", "lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}\" \"\"\" RETURN = \"\"\" _raw:", "file after templating with Jinja2 description: - Returns a list", "template convert_data: type: bool description: whether to convert YAML into", "import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION = \"\"\"", "We want to search into the 'templates' subdir of each", "could not be found for the lookup\" % term) return", "True) lookup_template_vars = kwargs.get('template_vars', {}) ret = [] variable_start_string =", "class LookupModule(LookupBase): def run(self, terms, variables, **kwargs): convert_data_p = kwargs.get('convert_data',", "our original search paths. 
newsearchpath = [] for p in", "kwargs.get('variable_start_string', None) variable_end_string = kwargs.get('variable_end_string', None) for term in terms:", "(see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function)", "each search path in # addition to our original search", "= Display() class LookupModule(LookupBase): def run(self, terms, variables, **kwargs): convert_data_p", "[]) if searchpath: # our search paths aren't actually the", "<filename>virt/ansible-latest/lib/python2.7/site-packages/ansible/plugins/lookup/template.py # Copyright: (c) 2012, <NAME> <<EMAIL>> # Copyright: (c)", "newsearchpath.append(p) searchpath = newsearchpath searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath = searchpath if", "ones for jinja includes. # We want to search into", "search paths aren't actually the proper ones for jinja includes.", "for jinja includes. # We want to search into the", "False, strings that are YAML will be left untouched. 
variable_start_string:", "results debug: msg: \"{{ lookup('template', './some_template.j2') }}\" - name: show", "for includes searchpath = variables.get('ansible_search_path', []) if searchpath: # our", "string debug: msg: \"{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}\" \"\"\"", "searchpath = newsearchpath searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath = searchpath if variable_start_string", "to_bytes, to_text from ansible.template import generate_ansible_template_vars from ansible.utils.display import Display", "term: %s\" % term) lookupfile = self.find_file_in_search_path(variables, 'templates', term) display.vvvv(\"File", "retrieve contents of file after templating with Jinja2 description: -", "%s could not be found for the lookup\" % term)", "in searchpath: newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p) searchpath = newsearchpath searchpath.insert(0, os.path.dirname(lookupfile))", "kwargs.get('variable_end_string', None) for term in terms: display.debug(\"File lookup term: %s\"", "the beginning of a print statement. default: '{{' version_added: '2.8'", "author: <NAME> <<EMAIL>> version_added: \"0.9\" short_description: retrieve contents of file", "YAML into data. If False, strings that are YAML will", "self._templar.environment.variable_end_string = variable_end_string # The template will have access to", "show_data = self._loader._get_file_contents(lookupfile) template_data = to_text(b_template_data, errors='surrogate_or_strict') # set jinja2", "YAML will be left untouched. 
variable_start_string: description: The string marking", "kwargs.get('template_vars', {}) ret = [] variable_start_string = kwargs.get('variable_start_string', None) variable_end_string", "[] for p in searchpath: newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p) searchpath =", "= newsearchpath searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath = searchpath if variable_start_string is", "to search into the 'templates' subdir of each search path", "os from ansible.errors import AnsibleError from ansible.plugins.lookup import LookupBase from", "lookupfile = self.find_file_in_search_path(variables, 'templates', term) display.vvvv(\"File lookup using %s as", "type: str \"\"\" EXAMPLES = \"\"\" - name: show templating", "search into the 'templates' subdir of each search path in", "do the templating res = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False) ret.append(res)", "templating results with different variable start and end string debug:", "p in searchpath: newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p) searchpath = newsearchpath searchpath.insert(0,", "default: '{{' version_added: '2.8' type: str variable_end_string: description: The string", "access to all existing variables, # plus some added by", "lookupfile) if lookupfile: b_template_data, show_data = self._loader._get_file_contents(lookupfile) template_data = to_text(b_template_data,", "}}\" \"\"\" RETURN = \"\"\" _raw: description: file(s) content after", "ansible.template import generate_ansible_template_vars from ansible.utils.display import Display display = Display()", "for term in terms: display.debug(\"File lookup term: %s\" % term)", "for p in searchpath: newsearchpath.append(os.path.join(p, 'templates')) newsearchpath.append(p) searchpath = newsearchpath", "lookup with the template_vars= # argument. 
vars = variables.copy() vars.update(generate_ansible_template_vars(lookupfile))", "templating res = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False) ret.append(res) else: raise", "of templates you pass in, returns a string containing the", "variable_start_string='[%', variable_end_string='%]') }}\" \"\"\" RETURN = \"\"\" _raw: description: file(s)", "jinja2 internal search path for includes searchpath = variables.get('ansible_search_path', [])", "ansible.plugins.lookup import LookupBase from ansible.module_utils._text import to_bytes, to_text from ansible.template", "\"\"\" import os from ansible.errors import AnsibleError from ansible.plugins.lookup import", "display = Display() class LookupModule(LookupBase): def run(self, terms, variables, **kwargs):", "plus anything passed to the lookup with the template_vars= #", "raise AnsibleError(\"the template file %s could not be found for", "AnsibleError from ansible.plugins.lookup import LookupBase from ansible.module_utils._text import to_bytes, to_text", "list of templates you pass in, returns a string containing", "content after templating \"\"\" import os from ansible.errors import AnsibleError", "v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division,", "# The template will have access to all existing variables,", "type: bool description: whether to convert YAML into data. If", "if variable_end_string is not None: self._templar.environment.variable_end_string = variable_end_string # The", "left untouched. variable_start_string: description: The string marking the beginning of", "statement. 
default: '{{' version_added: '2.8' type: str variable_end_string: description: The", "of file after templating with Jinja2 description: - Returns a", "**kwargs): convert_data_p = kwargs.get('convert_data', True) lookup_template_vars = kwargs.get('template_vars', {}) ret", "https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type", "of each search path in # addition to our original", "in the list of templates you pass in, returns a", "(absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION = \"\"\" lookup:", "<<EMAIL>> version_added: \"0.9\" short_description: retrieve contents of file after templating", "passed to the lookup with the template_vars= # argument. vars", "argument. vars = variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars) # do the", "<NAME> <<EMAIL>> version_added: \"0.9\" short_description: retrieve contents of file after", "template file %s could not be found for the lookup\"", "__future__ import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION =", "and end string debug: msg: \"{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]')", "with Jinja2 description: - Returns a list of strings; for", "\"\"\" - name: show templating results debug: msg: \"{{ lookup('template',", "version_added: '2.8' type: str \"\"\" EXAMPLES = \"\"\" - name:", "= self.find_file_in_search_path(variables, 'templates', term) display.vvvv(\"File lookup using %s as file\"", "The string marking the beginning of a print statement. 
default:", "variable_start_string if variable_end_string is not None: self._templar.environment.variable_end_string = variable_end_string #", "2012-17, Ansible Project # GNU General Public License v3.0+ (see", "= \"\"\" - name: show templating results debug: msg: \"{{", "None: self._templar.environment.variable_start_string = variable_start_string if variable_end_string is not None: self._templar.environment.variable_end_string", "= type DOCUMENTATION = \"\"\" lookup: template author: <NAME> <<EMAIL>>", "= variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars) # do the templating res", "to template convert_data: type: bool description: whether to convert YAML", "= kwargs.get('variable_end_string', None) for term in terms: display.debug(\"File lookup term:", "from ansible.module_utils._text import to_bytes, to_text from ansible.template import generate_ansible_template_vars from", "that template. options: _terms: description: list of files to template", "ret.append(res) else: raise AnsibleError(\"the template file %s could not be", "templating with Jinja2 description: - Returns a list of strings;", "% term) lookupfile = self.find_file_in_search_path(variables, 'templates', term) display.vvvv(\"File lookup using", "searchpath if variable_start_string is not None: self._templar.environment.variable_start_string = variable_start_string if", "the results of processing that template. options: _terms: description: list", "= variable_start_string if variable_end_string is not None: self._templar.environment.variable_end_string = variable_end_string", "string marking the end of a print statement. 
default: '}}'", "EXAMPLES = \"\"\" - name: show templating results debug: msg:", "# We want to search into the 'templates' subdir of", "[] variable_start_string = kwargs.get('variable_start_string', None) variable_end_string = kwargs.get('variable_end_string', None) for", "original search paths. newsearchpath = [] for p in searchpath:", "the proper ones for jinja includes. # We want to", "a string containing the results of processing that template. options:", "beginning of a print statement. default: '{{' version_added: '2.8' type:", "if searchpath: # our search paths aren't actually the proper", "License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import,", "_terms: description: list of files to template convert_data: type: bool", "templating \"\"\" import os from ansible.errors import AnsibleError from ansible.plugins.lookup", "variable_end_string = kwargs.get('variable_end_string', None) for term in terms: display.debug(\"File lookup", "subdir of each search path in # addition to our", "terms, variables, **kwargs): convert_data_p = kwargs.get('convert_data', True) lookup_template_vars = kwargs.get('template_vars',", "DOCUMENTATION = \"\"\" lookup: template author: <NAME> <<EMAIL>> version_added: \"0.9\"", "list of strings; for each template in the list of", "b_template_data, show_data = self._loader._get_file_contents(lookupfile) template_data = to_text(b_template_data, errors='surrogate_or_strict') # set", "to all existing variables, # plus some added by ansible", "from ansible.utils.display import Display display = Display() class LookupModule(LookupBase): def", "for each template in the list of templates you pass", "terms: display.debug(\"File lookup term: %s\" % term) lookupfile = self.find_file_in_search_path(variables,", "= searchpath if variable_start_string is not None: self._templar.environment.variable_start_string = variable_start_string", "be left untouched. 
variable_start_string: description: The string marking the beginning", "display.debug(\"File lookup term: %s\" % term) lookupfile = self.find_file_in_search_path(variables, 'templates',", "None) variable_end_string = kwargs.get('variable_end_string', None) for term in terms: display.debug(\"File", "convert YAML into data. If False, strings that are YAML", "into the 'templates' subdir of each search path in #", "file(s) content after templating \"\"\" import os from ansible.errors import", "= self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False) ret.append(res) else: raise AnsibleError(\"the template", "variables, **kwargs): convert_data_p = kwargs.get('convert_data', True) lookup_template_vars = kwargs.get('template_vars', {})", "type: str variable_end_string: description: The string marking the end of", "results of processing that template. options: _terms: description: list of", "ansible.errors import AnsibleError from ansible.plugins.lookup import LookupBase from ansible.module_utils._text import", "with the template_vars= # argument. vars = variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars)", "show templating results debug: msg: \"{{ lookup('template', './some_template.j2') }}\" -", "if variable_start_string is not None: self._templar.environment.variable_start_string = variable_start_string if variable_end_string", "to_text from ansible.template import generate_ansible_template_vars from ansible.utils.display import Display display", "not None: self._templar.environment.variable_start_string = variable_start_string if variable_end_string is not None:", "not be found for the lookup\" % term) return ret", "template in the list of templates you pass in, returns", "a print statement. 
default: '{{' version_added: '2.8' type: str variable_end_string:", "includes searchpath = variables.get('ansible_search_path', []) if searchpath: # our search", "The template will have access to all existing variables, #", "using %s as file\" % lookupfile) if lookupfile: b_template_data, show_data", "\"{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}\" \"\"\" RETURN = \"\"\"", "string containing the results of processing that template. options: _terms:", "is not None: self._templar.environment.variable_start_string = variable_start_string if variable_end_string is not", "preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False) ret.append(res) else: raise AnsibleError(\"the template file %s", "from ansible.template import generate_ansible_template_vars from ansible.utils.display import Display display =", "each template in the list of templates you pass in,", "term) display.vvvv(\"File lookup using %s as file\" % lookupfile) if", "jinja includes. # We want to search into the 'templates'", "string marking the beginning of a print statement. 
default: '{{'", "else: raise AnsibleError(\"the template file %s could not be found", "is not None: self._templar.environment.variable_end_string = variable_end_string # The template will", "self.find_file_in_search_path(variables, 'templates', term) display.vvvv(\"File lookup using %s as file\" %", "convert_data_p = kwargs.get('convert_data', True) lookup_template_vars = kwargs.get('template_vars', {}) ret =", "not None: self._templar.environment.variable_end_string = variable_end_string # The template will have", "Jinja2 description: - Returns a list of strings; for each", "self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p, escape_backslashes=False) ret.append(res) else: raise AnsibleError(\"the template file", "display.vvvv(\"File lookup using %s as file\" % lookupfile) if lookupfile:", "into data. If False, strings that are YAML will be", "to our original search paths. newsearchpath = [] for p", "files to template convert_data: type: bool description: whether to convert", "of a print statement. default: '}}' version_added: '2.8' type: str", "variable_end_string: description: The string marking the end of a print", "lookupfile: b_template_data, show_data = self._loader._get_file_contents(lookupfile) template_data = to_text(b_template_data, errors='surrogate_or_strict') #", "variable start and end string debug: msg: \"{{ lookup('template', './some_template.j2',", "str variable_end_string: description: The string marking the end of a", "kwargs.get('convert_data', True) lookup_template_vars = kwargs.get('template_vars', {}) ret = [] variable_start_string", "Returns a list of strings; for each template in the", "plus some added by ansible (e.g., template_{path,mtime}), # plus anything", "(e.g., template_{path,mtime}), # plus anything passed to the lookup with", "of a print statement. 
default: '{{' version_added: '2.8' type: str", "str \"\"\" EXAMPLES = \"\"\" - name: show templating results", "path for includes searchpath = variables.get('ansible_search_path', []) if searchpath: #", "variable_start_string = kwargs.get('variable_start_string', None) variable_end_string = kwargs.get('variable_end_string', None) for term", "a list of strings; for each template in the list", "name: show templating results debug: msg: \"{{ lookup('template', './some_template.j2') }}\"", "(c) 2012, <NAME> <<EMAIL>> # Copyright: (c) 2012-17, Ansible Project", "have access to all existing variables, # plus some added", "# Copyright: (c) 2012-17, Ansible Project # GNU General Public", "bool description: whether to convert YAML into data. If False,", "the 'templates' subdir of each search path in # addition", "variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars) # do the templating res =", "import AnsibleError from ansible.plugins.lookup import LookupBase from ansible.module_utils._text import to_bytes,", "GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from", "marking the beginning of a print statement. default: '{{' version_added:", "searchpath: # our search paths aren't actually the proper ones", "aren't actually the proper ones for jinja includes. # We", "escape_backslashes=False) ret.append(res) else: raise AnsibleError(\"the template file %s could not", "\"\"\" EXAMPLES = \"\"\" - name: show templating results debug:", "includes. # We want to search into the 'templates' subdir", "lookup term: %s\" % term) lookupfile = self.find_file_in_search_path(variables, 'templates', term)", "ansible (e.g., template_{path,mtime}), # plus anything passed to the lookup", "- name: show templating results with different variable start and", "end of a print statement. 
default: '}}' version_added: '2.8' type:", "import LookupBase from ansible.module_utils._text import to_bytes, to_text from ansible.template import", "'templates', term) display.vvvv(\"File lookup using %s as file\" % lookupfile)", "(c) 2012-17, Ansible Project # GNU General Public License v3.0+", "description: - Returns a list of strings; for each template", "searchpath.insert(0, os.path.dirname(lookupfile)) self._templar.environment.loader.searchpath = searchpath if variable_start_string is not None:", "'./some_template.j2') }}\" - name: show templating results with different variable", "ansible.module_utils._text import to_bytes, to_text from ansible.template import generate_ansible_template_vars from ansible.utils.display", "# plus anything passed to the lookup with the template_vars=", "end string debug: msg: \"{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}\"", "None) for term in terms: display.debug(\"File lookup term: %s\" %", "the end of a print statement. default: '}}' version_added: '2.8'", "\"\"\" RETURN = \"\"\" _raw: description: file(s) content after templating", "vars = variables.copy() vars.update(generate_ansible_template_vars(lookupfile)) vars.update(lookup_template_vars) self._templar.set_available_variables(vars) # do the templating", "path in # addition to our original search paths. newsearchpath", "Ansible Project # GNU General Public License v3.0+ (see COPYING", "- name: show templating results debug: msg: \"{{ lookup('template', './some_template.j2')", "template. options: _terms: description: list of files to template convert_data:", "anything passed to the lookup with the template_vars= # argument.", "Copyright: (c) 2012, <NAME> <<EMAIL>> # Copyright: (c) 2012-17, Ansible", "from __future__ import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION", "of processing that template. options: _terms: description: list of files", "are YAML will be left untouched. 
variable_start_string: description: The string", "print_function) __metaclass__ = type DOCUMENTATION = \"\"\" lookup: template author:", "AnsibleError(\"the template file %s could not be found for the", "default: '}}' version_added: '2.8' type: str \"\"\" EXAMPLES = \"\"\"", "term) lookupfile = self.find_file_in_search_path(variables, 'templates', term) display.vvvv(\"File lookup using %s", "errors='surrogate_or_strict') # set jinja2 internal search path for includes searchpath", "version_added: \"0.9\" short_description: retrieve contents of file after templating with", "= variables.get('ansible_search_path', []) if searchpath: # our search paths aren't", "want to search into the 'templates' subdir of each search", "%s\" % term) lookupfile = self.find_file_in_search_path(variables, 'templates', term) display.vvvv(\"File lookup", "template will have access to all existing variables, # plus", "as file\" % lookupfile) if lookupfile: b_template_data, show_data = self._loader._get_file_contents(lookupfile)", "= kwargs.get('template_vars', {}) ret = [] variable_start_string = kwargs.get('variable_start_string', None)", "some added by ansible (e.g., template_{path,mtime}), # plus anything passed", "'{{' version_added: '2.8' type: str variable_end_string: description: The string marking", "set jinja2 internal search path for includes searchpath = variables.get('ansible_search_path',", "by ansible (e.g., template_{path,mtime}), # plus anything passed to the", "search paths. newsearchpath = [] for p in searchpath: newsearchpath.append(os.path.join(p,", "- Returns a list of strings; for each template in", "strings that are YAML will be left untouched. 
variable_start_string: description:", "def run(self, terms, variables, **kwargs): convert_data_p = kwargs.get('convert_data', True) lookup_template_vars", "ret = [] variable_start_string = kwargs.get('variable_start_string', None) variable_end_string = kwargs.get('variable_end_string',", "debug: msg: \"{{ lookup('template', './some_template.j2') }}\" - name: show templating", "variable_start_string is not None: self._templar.environment.variable_start_string = variable_start_string if variable_end_string is", "to the lookup with the template_vars= # argument. vars =", "to convert YAML into data. If False, strings that are", "after templating \"\"\" import os from ansible.errors import AnsibleError from", "actually the proper ones for jinja includes. # We want", "containing the results of processing that template. options: _terms: description:", "2012, <NAME> <<EMAIL>> # Copyright: (c) 2012-17, Ansible Project #" ]
[ "join, dirname setup( name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer command-line file-information\",", "Python :: 3.6' ], packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(), entry_points={ \"console_scripts\": ['fant_sizer", "author_email=\"<EMAIL>\", description=\"fant_sizer command-line file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line information size tool", "description=\"fant_sizer command-line file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line information size tool recursively\",", "size tool recursively\", license=\"MIT\", classifiers=[ 'Topic :: Utilities', 'Environment ::", "OSI Approved :: MIT License', 'Intended Audience :: Developers', 'Intended", "Technology', 'Development Status :: 5 - Production/Stable', 'Programming Language ::", "import setup, find_packages from os.path import join, dirname setup( name=\"fant_sizer\",", "from setuptools import setup, find_packages from os.path import join, dirname", "url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line information size tool recursively\", license=\"MIT\", classifiers=[ 'Topic", "'Environment :: Console', 'Natural Language :: English', 'License :: OSI", "Approved :: MIT License', 'Intended Audience :: Developers', 'Intended Audience", "'Development Status :: 5 - Production/Stable', 'Programming Language :: Python", "recursively\", license=\"MIT\", classifiers=[ 'Topic :: Utilities', 'Environment :: Console', 'Natural", "file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line information size tool recursively\", license=\"MIT\", classifiers=[", "setup, find_packages from os.path import join, dirname setup( name=\"fant_sizer\", version=\"0.7\",", "'Topic :: Utilities', 'Environment :: Console', 'Natural Language :: English',", "MIT License', 
'Intended Audience :: Developers', 'Intended Audience :: Information", "Developers', 'Intended Audience :: Information Technology', 'Development Status :: 5", "5 - Production/Stable', 'Programming Language :: Python :: 3.6' ],", ":: OSI Approved :: MIT License', 'Intended Audience :: Developers',", ":: Information Technology', 'Development Status :: 5 - Production/Stable', 'Programming", "from os.path import join, dirname setup( name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "Audience :: Information Technology', 'Development Status :: 5 - Production/Stable',", "'Programming Language :: Python :: 3.6' ], packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(),", "License', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology',", ":: MIT License', 'Intended Audience :: Developers', 'Intended Audience ::", "import join, dirname setup( name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer command-line", "Audience :: Developers', 'Intended Audience :: Information Technology', 'Development Status", "Production/Stable', 'Programming Language :: Python :: 3.6' ], packages=find_packages(), long_description=open(join(dirname(__file__),", "'License :: OSI Approved :: MIT License', 'Intended Audience ::", "3.6' ], packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(), entry_points={ \"console_scripts\": ['fant_sizer = fant_sizer.fant_sizer:_main'],", "setuptools import setup, find_packages from os.path import join, dirname setup(", "English', 'License :: OSI Approved :: MIT License', 'Intended Audience", "Information Technology', 'Development Status :: 5 - Production/Stable', 'Programming Language", "dirname setup( name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer command-line file-information\", 
url=\"https://github.com/ripiuk/fant_sizer\",", "name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer command-line file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line", ":: Python :: 3.6' ], packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(), entry_points={ \"console_scripts\":", "setup( name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer command-line file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file", "tool recursively\", license=\"MIT\", classifiers=[ 'Topic :: Utilities', 'Environment :: Console',", "command-line information size tool recursively\", license=\"MIT\", classifiers=[ 'Topic :: Utilities',", "Language :: English', 'License :: OSI Approved :: MIT License',", ":: English', 'License :: OSI Approved :: MIT License', 'Intended", "Console', 'Natural Language :: English', 'License :: OSI Approved ::", "os.path import join, dirname setup( name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer", "'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'Development", "classifiers=[ 'Topic :: Utilities', 'Environment :: Console', 'Natural Language ::", "command-line file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line information size tool recursively\", license=\"MIT\",", "Utilities', 'Environment :: Console', 'Natural Language :: English', 'License ::", "information size tool recursively\", license=\"MIT\", classifiers=[ 'Topic :: Utilities', 'Environment", "version=\"0.7\", author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer command-line file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line information", "find_packages from os.path import join, dirname 
setup( name=\"fant_sizer\", version=\"0.7\", author=\"<NAME>\",", ":: Developers', 'Intended Audience :: Information Technology', 'Development Status ::", ":: Utilities', 'Environment :: Console', 'Natural Language :: English', 'License", "'Intended Audience :: Information Technology', 'Development Status :: 5 -", "'Natural Language :: English', 'License :: OSI Approved :: MIT", "Status :: 5 - Production/Stable', 'Programming Language :: Python ::", "author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"fant_sizer command-line file-information\", url=\"https://github.com/ripiuk/fant_sizer\", keywords=\"file command-line information size", "license=\"MIT\", classifiers=[ 'Topic :: Utilities', 'Environment :: Console', 'Natural Language", ":: Console', 'Natural Language :: English', 'License :: OSI Approved", ":: 3.6' ], packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(), entry_points={ \"console_scripts\": ['fant_sizer =", "packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(), entry_points={ \"console_scripts\": ['fant_sizer = fant_sizer.fant_sizer:_main'], }, )", "- Production/Stable', 'Programming Language :: Python :: 3.6' ], packages=find_packages(),", "Language :: Python :: 3.6' ], packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(), entry_points={", "], packages=find_packages(), long_description=open(join(dirname(__file__), \"README.rst\")).read(), entry_points={ \"console_scripts\": ['fant_sizer = fant_sizer.fant_sizer:_main'], },", ":: 5 - Production/Stable', 'Programming Language :: Python :: 3.6'", "keywords=\"file command-line information size tool recursively\", license=\"MIT\", classifiers=[ 'Topic ::" ]
[ "accum = 0 for i, r in enumerate(choco_row): accum +=", "i in range(R): row = input() for j in range(C):", "num_choco/(H+1) num_choco_v = num_choco/(V+1) accum = 0 for i, r", "accum += c if accum == num_choco_v: accum = 0", "V cuts if num_choco == 0: return 'POSSIBLE' H_idx, V_idx", "C) choco_row, choco_col = [0]*R, [0]*C num_choco = 0 for", "= num_choco/(H+1) num_choco_v = num_choco/(V+1) accum = 0 for i,", "in enumerate(choco_row): accum += r if accum == num_choco_h: accum", "c in V_idx: num = 0 for i in range(r_from,", "flag = False break if not flag: return 'IMPOSSIBLE' accum", "== '__main__': T = int(input()) for t in range(T): print('Case", "0 for i, c in enumerate(choco_col): accum += c if", "choco[i][j] = 1 choco_row[i] = row.count('@') num_choco += choco_row[i] #", "in range(r_from, r+1): for j in range(c_from, c+1): num +=", "None for r in H_idx: c_from = 0 for c", "+= choco[i][j] if num_prev is None: num_prev = num elif", "0 H_idx.append(i) elif accum > num_choco_h: flag = False break", "1 choco_row[i] = row.count('@') num_choco += choco_row[i] # Find H", "0: return 'POSSIBLE' H_idx, V_idx = [], [] flag =", "r_from = r+1 return 'POSSIBLE' if __name__ == '__main__': T", "0 for i in range(R): row = input() for j", "r_from = 0 num_prev = None for r in H_idx:", "input() for j in range(C): if row[j] == '@': choco_col[j]", "in range(R): choco.append([0] * C) choco_row, choco_col = [0]*R, [0]*C", "is None: num_prev = num elif num_prev != num: return", "None: num_prev = num elif num_prev != num: return 'IMPOSSIBLE'", "range(R): row = input() for j in range(C): if row[j]", "else: return 'IMPOSSIBLE' # Check each piece r_from = 0", "and num_choco%(V+1)==0: num_choco_h = num_choco/(H+1) num_choco_v = num_choco/(V+1) accum =", "break if not flag: return 'IMPOSSIBLE' accum = 0 for", "num += choco[i][j] if num_prev is None: num_prev = num", "if accum == num_choco_h: accum = 0 H_idx.append(i) elif accum", "H_idx.append(i) elif accum > num_choco_h: flag = False break 
if", "1 choco[i][j] = 1 choco_row[i] = row.count('@') num_choco += choco_row[i]", "i in range(r_from, r+1): for j in range(c_from, c+1): num", "row = input() for j in range(C): if row[j] ==", "row.count('@') num_choco += choco_row[i] # Find H and V cuts", "in H_idx: c_from = 0 for c in V_idx: num", "flag: return 'IMPOSSIBLE' else: return 'IMPOSSIBLE' # Check each piece", "[0]*R, [0]*C num_choco = 0 for i in range(R): row", "C, H, V = map(int, input().split()) choco = [] for", "and V cuts if num_choco == 0: return 'POSSIBLE' H_idx,", "R, C, H, V = map(int, input().split()) choco = []", "return 'POSSIBLE' H_idx, V_idx = [], [] flag = True", "elif num_prev != num: return 'IMPOSSIBLE' c_from = c+1 r_from", "num: return 'IMPOSSIBLE' c_from = c+1 r_from = r+1 return", "enumerate(choco_row): accum += r if accum == num_choco_h: accum =", "i, r in enumerate(choco_row): accum += r if accum ==", "= None for r in H_idx: c_from = 0 for", "in range(C): if row[j] == '@': choco_col[j] += 1 choco[i][j]", "__name__ == '__main__': T = int(input()) for t in range(T):", "for _ in range(R): choco.append([0] * C) choco_row, choco_col =", "if num_prev is None: num_prev = num elif num_prev !=", "range(c_from, c+1): num += choco[i][j] if num_prev is None: num_prev", "'__main__': T = int(input()) for t in range(T): print('Case #{}:", "return 'POSSIBLE' if __name__ == '__main__': T = int(input()) for", "== num_choco_h: accum = 0 H_idx.append(i) elif accum > num_choco_h:", "return 'IMPOSSIBLE' # Check each piece r_from = 0 num_prev", "+= c if accum == num_choco_v: accum = 0 V_idx.append(i)", "0 num_prev = None for r in H_idx: c_from =", "Read input R, C, H, V = map(int, input().split()) choco", "enumerate(choco_col): accum += c if accum == num_choco_v: accum =", "r in enumerate(choco_row): accum += r if accum == num_choco_h:", "if accum == num_choco_v: accum = 0 V_idx.append(i) elif accum", "= num elif num_prev != num: return 'IMPOSSIBLE' c_from =", "'@': choco_col[j] += 1 choco[i][j] = 1 
choco_row[i] = row.count('@')", "num_prev is None: num_prev = num elif num_prev != num:", "= r+1 return 'POSSIBLE' if __name__ == '__main__': T =", "in enumerate(choco_col): accum += c if accum == num_choco_v: accum", "num elif num_prev != num: return 'IMPOSSIBLE' c_from = c+1", "in range(c_from, c+1): num += choco[i][j] if num_prev is None:", "= 0 H_idx.append(i) elif accum > num_choco_h: flag = False", "num_prev = None for r in H_idx: c_from = 0", "cuts if num_choco == 0: return 'POSSIBLE' H_idx, V_idx =", "num = 0 for i in range(r_from, r+1): for j", "0 for c in V_idx: num = 0 for i", "for i in range(r_from, r+1): for j in range(c_from, c+1):", "return 'IMPOSSIBLE' else: return 'IMPOSSIBLE' # Check each piece r_from", "'POSSIBLE' H_idx, V_idx = [], [] flag = True if", "choco_col = [0]*R, [0]*C num_choco = 0 for i in", "choco_row, choco_col = [0]*R, [0]*C num_choco = 0 for i", "range(C): if row[j] == '@': choco_col[j] += 1 choco[i][j] =", "map(int, input().split()) choco = [] for _ in range(R): choco.append([0]", "in V_idx: num = 0 for i in range(r_from, r+1):", "accum = 0 V_idx.append(i) elif accum > num_choco_v: flag =", "= c+1 r_from = r+1 return 'POSSIBLE' if __name__ ==", "num_choco_v: accum = 0 V_idx.append(i) elif accum > num_choco_v: flag", "if not flag: return 'IMPOSSIBLE' accum = 0 for i,", "c+1): num += choco[i][j] if num_prev is None: num_prev =", "num_choco_h: flag = False break if not flag: return 'IMPOSSIBLE'", "[0]*C num_choco = 0 for i in range(R): row =", "elif accum > num_choco_v: flag = False break if not", "+= choco_row[i] # Find H and V cuts if num_choco", "for c in V_idx: num = 0 for i in", "elif accum > num_choco_h: flag = False break if not", "in range(R): row = input() for j in range(C): if", "= [] for _ in range(R): choco.append([0] * C) choco_row,", "= map(int, input().split()) choco = [] for _ in range(R):", "choco = [] for _ in range(R): choco.append([0] * C)", "Check each piece r_from = 0 num_prev = None for", "input().split()) 
choco = [] for _ in range(R): choco.append([0] *", "for i in range(R): row = input() for j in", "False break if not flag: return 'IMPOSSIBLE' else: return 'IMPOSSIBLE'", "H_idx, V_idx = [], [] flag = True if num_choco%(H+1)==0", "for i, r in enumerate(choco_row): accum += r if accum", "break if not flag: return 'IMPOSSIBLE' else: return 'IMPOSSIBLE' #", "choco_row[i] = row.count('@') num_choco += choco_row[i] # Find H and", "flag = False break if not flag: return 'IMPOSSIBLE' else:", "accum = 0 for i, c in enumerate(choco_col): accum +=", "r in H_idx: c_from = 0 for c in V_idx:", "num_choco += choco_row[i] # Find H and V cuts if", "V_idx: num = 0 for i in range(r_from, r+1): for", "num_choco_v = num_choco/(V+1) accum = 0 for i, r in", "input R, C, H, V = map(int, input().split()) choco =", "* C) choco_row, choco_col = [0]*R, [0]*C num_choco = 0", "= [], [] flag = True if num_choco%(H+1)==0 and num_choco%(V+1)==0:", "for r in H_idx: c_from = 0 for c in", "= 0 for i in range(r_from, r+1): for j in", "r+1): for j in range(c_from, c+1): num += choco[i][j] if", "choco_row[i] # Find H and V cuts if num_choco ==", "def solve(): # Read input R, C, H, V =", "r+1 return 'POSSIBLE' if __name__ == '__main__': T = int(input())", "not flag: return 'IMPOSSIBLE' accum = 0 for i, c", "+= r if accum == num_choco_h: accum = 0 H_idx.append(i)", "accum = 0 H_idx.append(i) elif accum > num_choco_h: flag =", "= input() for j in range(C): if row[j] == '@':", "'IMPOSSIBLE' accum = 0 for i, c in enumerate(choco_col): accum", "accum > num_choco_h: flag = False break if not flag:", "= 0 for c in V_idx: num = 0 for", "False break if not flag: return 'IMPOSSIBLE' accum = 0", "accum > num_choco_v: flag = False break if not flag:", "r if accum == num_choco_h: accum = 0 H_idx.append(i) elif", "accum == num_choco_v: accum = 0 V_idx.append(i) elif accum >", "'IMPOSSIBLE' # Check each piece r_from = 0 num_prev =", "num_choco = 0 for i in range(R): row = input()", "num_choco_h = num_choco/(H+1) 
num_choco_v = num_choco/(V+1) accum = 0 for", "for j in range(c_from, c+1): num += choco[i][j] if num_prev", "V_idx.append(i) elif accum > num_choco_v: flag = False break if", "choco_col[j] += 1 choco[i][j] = 1 choco_row[i] = row.count('@') num_choco", "num_prev = num elif num_prev != num: return 'IMPOSSIBLE' c_from", "j in range(C): if row[j] == '@': choco_col[j] += 1", "num_choco_h: accum = 0 H_idx.append(i) elif accum > num_choco_h: flag", "True if num_choco%(H+1)==0 and num_choco%(V+1)==0: num_choco_h = num_choco/(H+1) num_choco_v =", "= 0 for i, c in enumerate(choco_col): accum += c", "= 0 V_idx.append(i) elif accum > num_choco_v: flag = False", "T = int(input()) for t in range(T): print('Case #{}: {}'.format(t+1,", "= True if num_choco%(H+1)==0 and num_choco%(V+1)==0: num_choco_h = num_choco/(H+1) num_choco_v", "row[j] == '@': choco_col[j] += 1 choco[i][j] = 1 choco_row[i]", "num_choco%(V+1)==0: num_choco_h = num_choco/(H+1) num_choco_v = num_choco/(V+1) accum = 0", "num_choco == 0: return 'POSSIBLE' H_idx, V_idx = [], []", "c_from = c+1 r_from = r+1 return 'POSSIBLE' if __name__", "range(r_from, r+1): for j in range(c_from, c+1): num += choco[i][j]", "0 for i, r in enumerate(choco_row): accum += r if", "> num_choco_h: flag = False break if not flag: return", "0 V_idx.append(i) elif accum > num_choco_v: flag = False break", "V = map(int, input().split()) choco = [] for _ in", "H and V cuts if num_choco == 0: return 'POSSIBLE'", "if num_choco%(H+1)==0 and num_choco%(V+1)==0: num_choco_h = num_choco/(H+1) num_choco_v = num_choco/(V+1)", "num_choco/(V+1) accum = 0 for i, r in enumerate(choco_row): accum", "== '@': choco_col[j] += 1 choco[i][j] = 1 choco_row[i] =", "for j in range(C): if row[j] == '@': choco_col[j] +=", "_ in range(R): choco.append([0] * C) choco_row, choco_col = [0]*R,", "[], [] flag = True if num_choco%(H+1)==0 and num_choco%(V+1)==0: num_choco_h", "if __name__ == '__main__': T = int(input()) for t in", "'IMPOSSIBLE' c_from = c+1 r_from = r+1 
return 'POSSIBLE' if", "H, V = map(int, input().split()) choco = [] for _", "= False break if not flag: return 'IMPOSSIBLE' accum =", "+= 1 choco[i][j] = 1 choco_row[i] = row.count('@') num_choco +=", "num_prev != num: return 'IMPOSSIBLE' c_from = c+1 r_from =", "if not flag: return 'IMPOSSIBLE' else: return 'IMPOSSIBLE' # Check", "c if accum == num_choco_v: accum = 0 V_idx.append(i) elif", "i, c in enumerate(choco_col): accum += c if accum ==", "each piece r_from = 0 num_prev = None for r", "for i, c in enumerate(choco_col): accum += c if accum", "piece r_from = 0 num_prev = None for r in", "!= num: return 'IMPOSSIBLE' c_from = c+1 r_from = r+1", "accum += r if accum == num_choco_h: accum = 0", "if row[j] == '@': choco_col[j] += 1 choco[i][j] = 1", "[] for _ in range(R): choco.append([0] * C) choco_row, choco_col", "j in range(c_from, c+1): num += choco[i][j] if num_prev is", "= row.count('@') num_choco += choco_row[i] # Find H and V", "== num_choco_v: accum = 0 V_idx.append(i) elif accum > num_choco_v:", "# Find H and V cuts if num_choco == 0:", "0 for i in range(r_from, r+1): for j in range(c_from,", "range(R): choco.append([0] * C) choco_row, choco_col = [0]*R, [0]*C num_choco", "# Read input R, C, H, V = map(int, input().split())", "Find H and V cuts if num_choco == 0: return", "[] flag = True if num_choco%(H+1)==0 and num_choco%(V+1)==0: num_choco_h =", "> num_choco_v: flag = False break if not flag: return", "H_idx: c_from = 0 for c in V_idx: num =", "flag: return 'IMPOSSIBLE' accum = 0 for i, c in", "= int(input()) for t in range(T): print('Case #{}: {}'.format(t+1, solve()))", "= False break if not flag: return 'IMPOSSIBLE' else: return", "= [0]*R, [0]*C num_choco = 0 for i in range(R):", "== 0: return 'POSSIBLE' H_idx, V_idx = [], [] flag", "= 0 for i, r in enumerate(choco_row): accum += r", "c in enumerate(choco_col): accum += c if accum == num_choco_v:", "= num_choco/(V+1) accum = 0 for i, r in enumerate(choco_row):", "# Check each piece r_from = 0 
num_prev = None", "= 0 num_prev = None for r in H_idx: c_from", "flag = True if num_choco%(H+1)==0 and num_choco%(V+1)==0: num_choco_h = num_choco/(H+1)", "V_idx = [], [] flag = True if num_choco%(H+1)==0 and", "choco.append([0] * C) choco_row, choco_col = [0]*R, [0]*C num_choco =", "= 1 choco_row[i] = row.count('@') num_choco += choco_row[i] # Find", "num_choco%(H+1)==0 and num_choco%(V+1)==0: num_choco_h = num_choco/(H+1) num_choco_v = num_choco/(V+1) accum", "not flag: return 'IMPOSSIBLE' else: return 'IMPOSSIBLE' # Check each", "if num_choco == 0: return 'POSSIBLE' H_idx, V_idx = [],", "return 'IMPOSSIBLE' c_from = c+1 r_from = r+1 return 'POSSIBLE'", "accum == num_choco_h: accum = 0 H_idx.append(i) elif accum >", "c+1 r_from = r+1 return 'POSSIBLE' if __name__ == '__main__':", "c_from = 0 for c in V_idx: num = 0", "'POSSIBLE' if __name__ == '__main__': T = int(input()) for t", "solve(): # Read input R, C, H, V = map(int,", "'IMPOSSIBLE' else: return 'IMPOSSIBLE' # Check each piece r_from =", "num_choco_v: flag = False break if not flag: return 'IMPOSSIBLE'", "choco[i][j] if num_prev is None: num_prev = num elif num_prev", "return 'IMPOSSIBLE' accum = 0 for i, c in enumerate(choco_col):", "= 0 for i in range(R): row = input() for" ]
[ "assert_equal({ 'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020' }, submission.properties) def test_get_logical_properties(self): submission", "fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({", "cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId']) finally: try: cluster.fs.rmtree(prefix)", "'' class MockJt(): def __init__(self, logical_name=None): self.logical_name = logical_name if", "this work for additional information # regarding copyright ownership. Cloudera,", "+ '/udf4.jar'), list_dir_deployement) stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar') stats_udf2 =", "more contributor license agreements. See the NOTICE file # distributed", "'', 'jobTracker': 'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples', 'output': '', 'nameNode':", "'hue', 'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020', 'jobTracker': 'jtaddress' } final_properties =", "assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir +", "'deployment-directory') assert_equal(final_properties, submission.properties) finally: clear_sys_caches() for reset in finish: reset()", "= Submission(self.user, fs=MockFs()) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'curacao:8032',", "logical_name else '' class MockJt(): def __init__(self, logical_name=None): self.logical_name =", "submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs()) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 
'deployment-directory')", "} final_properties = properties.copy() submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs())", "= cluster.fs.stats(deployment_dir + '/udf4.jar') submission._copy_files('%s/workspace' % prefix, \"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'],", "\"\"\" properties = \"\"\" # # Licensed to the Hue", "assert_equal({'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker': 'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples',", "from desktop.lib.test_utils import clear_sys_caches from desktop.lib.django_test_util import make_logged_in_client LOG =", "2.0 (the # \"License\"); you may not use this file", "cluster.fs.rmtree(prefix) except: LOG.exception('failed to remove %s' % prefix) class MockFs():", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "regarding copyright ownership. Cloudera, Inc. licenses this file # to", "submission.properties) def test_get_logical_properties(self): submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties)", "external_deployment_dir + '/lib' list_dir_workspace = cluster.fs.listdir(deployment_dir) list_dir_deployement = cluster.fs.listdir(external_deployment_dir) #", "under the License. 
import logging from django.contrib.auth.models import User from", "\"\"\" # # Licensed to the Hue # nameNode=hdfs://localhost:8020 jobTracker=localhost:8021", "= '%s/deployment' % prefix jar_1 = '%s/udf1.jar' % prefix jar_2", "= cluster.fs.listdir(external_deployment_dir) # All destinations there assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace)", "assert_equal({ 'jobTracker': 'jtname', 'nameNode': 'fsname' }, submission.properties) def test_update_properties(self): finish", "logical_name else '' class TestSubmission(OozieMockBase): def test_get_properties(self): submission = Submission(self.user,", "% prefix jar_1 = '%s/udf1.jar' % prefix jar_2 = '%s/lib/udf2.jar'", "distributed with this work for additional information # regarding copyright", "test_copy_files(): cluster = pseudo_hdfs4.shared_cluster() try: c = make_logged_in_client() user =", "examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters = Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath': 'true',", "import cluster, pseudo_hdfs4 from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS from", "LOG = logging.getLogger(__name__) @attr('requires_hadoop') def test_copy_files(): cluster = pseudo_hdfs4.shared_cluster() try:", "oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters = Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath': 'true', 'input':", "import OozieMockBase from desktop.lib.test_utils import clear_sys_caches from desktop.lib.django_test_util import make_logged_in_client", "'/lib' external_deployment_dir = external_deployment_dir + '/lib' list_dir_workspace = cluster.fs.listdir(deployment_dir) list_dir_deployement", "properties=properties, oozie_id='test', fs=MockFs()) 
assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) clear_sys_caches()", "% prefix, \"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir", "stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar') stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar')", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'jtname',", "cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jobtracker',", "+ '/lib' list_dir_workspace = cluster.fs.listdir(deployment_dir) list_dir_deployement = cluster.fs.listdir(external_deployment_dir) # All", "additional information # regarding copyright ownership. Cloudera, Inc. 
licenses this", "oozie.tests import OozieMockBase from desktop.lib.test_utils import clear_sys_caches from desktop.lib.django_test_util import", "= make_logged_in_client() user = User.objects.get(username='test') prefix = '/tmp/test_copy_files' if cluster.fs.exists(prefix):", "= cluster.fs.stats(deployment_dir + '/udf3.jar') stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar') submission._copy_files('%s/workspace'", "prefix) class MockFs(): def __init__(self, logical_name=None): self.fs_defaultfs = 'hdfs://curacao:8020' self.logical_name", "prefix jar_1 = '%s/udf1.jar' % prefix jar_2 = '%s/lib/udf2.jar' %", "def __init__(self, jar_path): self.jar_path = jar_path class MockJob(): def __init__(self):", "Hue # nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters", "under the License is distributed on an \"AS IS\" BASIS,", "'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples', 'output': '', 'nameNode': 'hdfs://localhost:8020', 'queueName': 'default'", "file # distributed with this work for additional information #", "# Licensed to Cloudera, Inc. 
under one # or more", "<configuration> <property> <name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration> </pig> <ok to=\"end\"/> <error", "License for the specific language governing permissions and # limitations", "list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir", "from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS from liboozie.submittion import Submission", "MockNode(jar_2), MockNode(jar_3), MockNode(jar_4), ] def get_application_filename(self): return 'workflow.xml' submission =", "self.jar_path = jar_path class MockJob(): def __init__(self): self.node_list = [", "'/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId']) finally: try: cluster.fs.rmtree(prefix) except: LOG.exception('failed", "__init__(self, logical_name=None): self.logical_name = logical_name if logical_name else '' class", "}, submission.properties) def test_get_logical_properties(self): submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({},", "submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'jtname', 'nameNode': 'fsname' }, submission.properties) def", "finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties", "finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties =", "'jobTracker': 'jobtracker', 'nameNode': 'namenode' }) 
submission = Submission(None, properties=properties, oozie_id='test',", "submission.properties) finally: clear_sys_caches() for reset in finish: reset() def test_get_external_parameters(self):", "cluster.fs.listdir(deployment_dir) list_dir_deployement = cluster.fs.listdir(external_deployment_dir) # All destinations there assert_true(cluster.fs.exists(deployment_dir +", "to you under the Apache License, Version 2.0 (the #", "cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir +", "LOG.exception('failed to remove %s' % prefix) class MockFs(): def __init__(self,", "desktop.lib.django_test_util import make_logged_in_client LOG = logging.getLogger(__name__) @attr('requires_hadoop') def test_copy_files(): cluster", "= cluster.fs.stats(deployment_dir + '/udf2.jar') stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar') stats_udf4", "Licensed to Cloudera, Inc. under one # or more contributor", "<action name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare> <delete path=\"${output}\"/> </prepare> <script>aggregate.pig</script>", "may not use this file except in compliance # with", "submission = Submission(self.user, fs=MockFs()) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker':", "<ok to=\"end\"/> <error to=\"kill\"/> </action> <kill name=\"kill\"> <message>Action failed, error", "software # distributed under the License is distributed on an", "class MockJob(): def __init__(self): self.node_list = [ MockNode(jar_1), MockNode(jar_2), MockNode(jar_3),", "class TestSubmission(OozieMockBase): def test_get_properties(self): submission = Submission(self.user, fs=MockFs()) assert_equal({}, submission.properties)", "and # limitations under the License. 
import logging from django.contrib.auth.models", "assert_equal(final_properties, submission.properties) clear_sys_caches() fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties", "'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples', 'output': '', 'nameNode': 'hdfs://localhost:8020', 'queueName':", "'${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples', 'output': '', 'nameNode': 'hdfs://localhost:8020', 'queueName': 'default' },", "final_properties = properties.copy() submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs()) assert_equal(properties,", "@attr('requires_hadoop') def test_copy_files(): cluster = pseudo_hdfs4.shared_cluster() try: c = make_logged_in_client()", "submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({", "from liboozie.submittion import Submission from oozie.tests import OozieMockBase from desktop.lib.test_utils", "submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finally: clear_sys_caches() for reset in", "% prefix external_deployment_dir = '%s/deployment' % prefix jar_1 = '%s/udf1.jar'", "License, Version 2.0 (the # \"License\"); you may not use", "'/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'],", "fs.fs_defaultfs }) submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt) assert_equal(properties,", "<argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration> <property> 
<name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration> </pig> <ok", "All sources still there assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir =", "'jobTracker': 'jtname', 'nameNode': 'fsname' }, submission.properties) def test_update_properties(self): finish =", "<name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration> </pig> <ok to=\"end\"/> <error to=\"kill\"/> </action>", "reset in finish: reset() def test_get_external_parameters(self): xml = \"\"\" <workflow-app", "make_logged_in_client LOG = logging.getLogger(__name__) @attr('requires_hadoop') def test_copy_files(): cluster = pseudo_hdfs4.shared_cluster()", "clear_sys_caches() fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy()", "# nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters =", "+ '/lib' external_deployment_dir = external_deployment_dir + '/lib' list_dir_workspace = cluster.fs.listdir(deployment_dir)", "submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020' }, submission.properties) def", "'examplesRoot': 'examples', 'output': '', 'nameNode': 'hdfs://localhost:8020', 'queueName': 'default' }, parameters)", "</kill> <end name=\"end\"/> </workflow-app> \"\"\" properties = \"\"\" # #", "fs=MockFs()) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020'", "User from nose.plugins.attrib import attr 
from nose.tools import assert_equal, assert_true,", "finish: reset() def test_get_external_parameters(self): xml = \"\"\" <workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\">", "See the NOTICE file # distributed with this work for", "cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) # Jars in various locations deployment_dir = '%s/workspace'", "'%s/udf1.jar' % prefix jar_2 = '%s/lib/udf2.jar' % prefix jar_3 =", "assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020' },", "final_properties.update({ 'jobTracker': 'jtaddress', 'nameNode': fs.fs_defaultfs }) submission = Submission(None, properties=properties,", "'jtaddress', 'nameNode': fs.fs_defaultfs }) submission = Submission(None, properties=properties, oozie_id='test', fs=fs,", "to the Hue # nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig", "submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\") # All sources still there assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2))", "= cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jobtracker', 'nameNode': 'namenode'", "% prefix jar_3 = '%s/udf3.jar' % deployment_dir jar_4 = '%s/lib/udf4.jar'", "if logical_name else '' class TestSubmission(OozieMockBase): def test_get_properties(self): submission =", "</workflow-app> \"\"\" properties = \"\"\" # # Licensed to the", "if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) # Jars in various locations deployment_dir =", "Apache License, Version 2.0 (the # \"License\"); you may not", "<job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare> <delete path=\"${output}\"/> </prepare> 
<script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument>", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"\"\" parameters = Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker':", "ownership. Cloudera, Inc. licenses this file # to you under", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "= logging.getLogger(__name__) @attr('requires_hadoop') def test_copy_files(): cluster = pseudo_hdfs4.shared_cluster() try: c", "error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end name=\"end\"/> </workflow-app> \"\"\" properties = \"\"\"", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir", "name=\"kill\"> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end name=\"end\"/> </workflow-app> \"\"\"", "properties.copy() final_properties.update({ 'jobTracker': 'jobtracker', 'nameNode': 'namenode' }) submission = Submission(None,", "file except in compliance # with the License. 
You may", "to in writing, software # distributed under the License is", "submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt) submission._copy_files(deployment_dir, \"<xml>My XML</xml>\") submission._copy_files(external_deployment_dir,", "the Hue # nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\"", "# See the License for the specific language governing permissions", "Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties,", "or agreed to in writing, software # distributed under the", "you may not use this file except in compliance #", "desktop.lib.test_utils import clear_sys_caches from desktop.lib.django_test_util import make_logged_in_client LOG = logging.getLogger(__name__)", "locations deployment_dir = '%s/workspace' % prefix external_deployment_dir = '%s/deployment' %", "required by applicable law or agreed to in writing, software", "'/udf4.jar') submission._copy_files('%s/workspace' % prefix, \"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId'])", "name=\"end\"/> </workflow-app> \"\"\" properties = \"\"\" # # Licensed to", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "deployment_dir = '%s/workspace' % prefix external_deployment_dir = '%s/deployment' % prefix", "MockJob(): def __init__(self): self.node_list = [ MockNode(jar_1), MockNode(jar_2), MockNode(jar_3), MockNode(jar_4),", "use this file except in compliance # with the License.", "'/udf4.jar')['fileId']) finally: try: cluster.fs.rmtree(prefix) except: LOG.exception('failed to remove %s' %", "= '/tmp/test_copy_files' if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) # 
Jars in various locations", "Submission(self.user, fs=MockFs()) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'curacao:8032', 'nameNode':", "jobTracker=localhost:8021 queueName=default examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters = Submission(self.user)._get_external_parameters(xml, properties)", "contributor license agreements. See the NOTICE file # distributed with", "= logical_name if logical_name else '' class TestSubmission(OozieMockBase): def test_get_properties(self):", "nose.tools import assert_equal, assert_true, assert_not_equal from hadoop import cluster, pseudo_hdfs4", "Submission from oozie.tests import OozieMockBase from desktop.lib.test_utils import clear_sys_caches from", "[] finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties =", "in finish: reset() def test_get_external_parameters(self): xml = \"\"\" <workflow-app name=\"Pig\"", "prefix jar_3 = '%s/udf3.jar' % deployment_dir jar_4 = '%s/lib/udf4.jar' %", "agreed to in writing, software # distributed under the License", "the License. 
import logging from django.contrib.auth.models import User from nose.plugins.attrib", "from hadoop import cluster, pseudo_hdfs4 from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS,", "distributed under the License is distributed on an \"AS IS\"", "'/lib' list_dir_workspace = cluster.fs.listdir(deployment_dir) list_dir_deployement = cluster.fs.listdir(external_deployment_dir) # All destinations", "'hdfs://curacao:8020' }, submission.properties) def test_get_logical_properties(self): submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname'))", "import attr from nose.tools import assert_equal, assert_true, assert_not_equal from hadoop", "with this work for additional information # regarding copyright ownership.", "parameters = Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker': 'localhost:8021',", "<reponame>vinaymundada27/Hue #!/usr/bin/env python # Licensed to Cloudera, Inc. under one", "'workflow.xml' submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt) submission._copy_files(deployment_dir, \"<xml>My XML</xml>\")", "<argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration> <property> <name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration>", "MockNode(): def __init__(self, jar_path): self.jar_path = jar_path class MockJob(): def", "list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement) stats_udf1", "deployment_dir jar_4 = '%s/lib/udf4.jar' % deployment_dir # Never move cluster.fs.mkdir(prefix)", "express or implied. 
# See the License for the specific", "nose.plugins.attrib import attr from nose.tools import assert_equal, assert_true, assert_not_equal from", "+ '/udf1.jar') stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar') stats_udf3 = cluster.fs.stats(deployment_dir", "Submission(None, properties=properties, oozie_id='test', fs=MockFs()) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties)", "test_get_logical_properties(self): submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir')", "+ '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId']) finally: try: cluster.fs.rmtree(prefix) except:", "liboozie.submittion import Submission from oozie.tests import OozieMockBase from desktop.lib.test_utils import", "writing, software # distributed under the License is distributed on", "stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar') submission._copy_files('%s/workspace' % prefix, \"<xml>My XML</xml>\")", "finish = [] finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try:", "Licensed to the Hue # nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default examplesRoot=examples oozie.use.system.libpath=true", "list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir", "the License. 
You may obtain a copy of the License", "# # Licensed to the Hue # nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default", "oozie_id='test', fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finally:", "+ '/udf2.jar') stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar') stats_udf4 = cluster.fs.stats(deployment_dir", "final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jtaddress', 'nameNode': fs.fs_defaultfs }) submission", "Jars in various locations deployment_dir = '%s/workspace' % prefix external_deployment_dir", "License. import logging from django.contrib.auth.models import User from nose.plugins.attrib import", "under the Apache License, Version 2.0 (the # \"License\"); you", "assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir +", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "oozie_id='test', fs=MockFs()) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) clear_sys_caches() fs", "+ '/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'),", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "OozieMockBase from desktop.lib.test_utils import clear_sys_caches from desktop.lib.django_test_util import make_logged_in_client LOG", "cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jobtracker', 'nameNode': 'namenode' })", "XML</xml>\") submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\") # All sources still there assert_true(cluster.fs.exists(jar_1))", "[ MockNode(jar_1), MockNode(jar_2), MockNode(jar_3), MockNode(jar_4), ] def get_application_filename(self): return 'workflow.xml'", "= Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory')", "= external_deployment_dir + '/lib' list_dir_workspace = cluster.fs.listdir(deployment_dir) list_dir_deployement = cluster.fs.listdir(external_deployment_dir)", "'/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement)", "MockJt(): def __init__(self, logical_name=None): self.logical_name = logical_name if logical_name else", "<value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration> </pig> <ok to=\"end\"/> <error to=\"kill\"/> </action> <kill", "fs=cluster.fs, jt=cluster.jt) submission._copy_files(deployment_dir, \"<xml>My XML</xml>\") 
submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\") # All", "or more contributor license agreements. See the NOTICE file #", "= logical_name if logical_name else '' class MockJt(): def __init__(self,", "jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches()", "logging from django.contrib.auth.models import User from nose.plugins.attrib import attr from", "logical_name if logical_name else '' class TestSubmission(OozieMockBase): def test_get_properties(self): submission", "Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt) submission._copy_files(deployment_dir, \"<xml>My XML</xml>\") submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\")", "name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare> <delete path=\"${output}\"/> </prepare> <script>aggregate.pig</script> <argument>-param</argument>", "+ '/udf4.jar') submission._copy_files('%s/workspace' % prefix, \"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir +", "the NOTICE file # distributed with this work for additional", "'/udf2.jar') stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar') stats_udf4 = cluster.fs.stats(deployment_dir +", "'jtaddress' } final_properties = properties.copy() submission = Submission(None, properties=properties, oozie_id='test',", "'namenode' }) submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt) assert_equal(properties,", "XML</xml>\") # All sources still there assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4))", 
"<property> <name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration> </pig> <ok to=\"end\"/> <error to=\"kill\"/>", "'deployment-directory') assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs = cluster.get_hdfs() jt", "'nameNode': 'hdfs://curacao:8020', 'jobTracker': 'jtaddress' } final_properties = properties.copy() submission =", "make_logged_in_client() user = User.objects.get(username='test') prefix = '/tmp/test_copy_files' if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix)", "to=\"kill\"/> </action> <kill name=\"kill\"> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end", "YARN_CLUSTERS from liboozie.submittion import Submission from oozie.tests import OozieMockBase from", "properties.copy() submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs()) assert_equal(properties, submission.properties) submission._update_properties('jtaddress',", "logical_name if logical_name else '' class MockJt(): def __init__(self, logical_name=None):", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finally: clear_sys_caches() for reset", "<error to=\"kill\"/> </action> <kill name=\"kill\"> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill>", "the License is distributed on an \"AS IS\" BASIS, #", "jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'jtname', 'nameNode': 'fsname'", "from oozie.tests import OozieMockBase from desktop.lib.test_utils import clear_sys_caches from desktop.lib.django_test_util", "+ '/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'),", "TestSubmission(OozieMockBase): def test_get_properties(self): submission = Submission(self.user, fs=MockFs()) assert_equal({}, submission.properties) submission._update_properties('curacao:8032',", "import assert_equal, assert_true, assert_not_equal from hadoop import cluster, pseudo_hdfs4 from", "assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) clear_sys_caches() fs = cluster.get_hdfs()", "cluster.fs.stats(deployment_dir + '/udf1.jar') stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar') stats_udf3 =", "= cluster.fs.listdir(deployment_dir) list_dir_deployement = cluster.fs.listdir(external_deployment_dir) # All destinations there assert_true(cluster.fs.exists(deployment_dir", "governing permissions and # limitations under the License. 
import logging", "+ '/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'),", "+ '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId'])", "jar_2 = '%s/lib/udf2.jar' % prefix jar_3 = '%s/udf3.jar' % deployment_dir", "'%s/deployment' % prefix jar_1 = '%s/udf1.jar' % prefix jar_2 =", "Never move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4) class MockNode(): def", "def test_update_properties(self): finish = [] finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}}))", "assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir +", "MockFs(): def __init__(self, logical_name=None): self.fs_defaultfs = 'hdfs://curacao:8020' self.logical_name = logical_name", "jar_4 = '%s/lib/udf4.jar' % deployment_dir # Never move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1)", "logical_name=None): self.fs_defaultfs = 'hdfs://curacao:8020' self.logical_name = logical_name if logical_name else", "= cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker':", "cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jtaddress', 'nameNode': fs.fs_defaultfs })", "law or agreed to in writing, software # distributed under", "test_update_properties(self): finish = [] 
finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))", "properties = \"\"\" # # Licensed to the Hue #", "pseudo_hdfs4.shared_cluster() try: c = make_logged_in_client() user = User.objects.get(username='test') prefix =", "cluster = pseudo_hdfs4.shared_cluster() try: c = make_logged_in_client() user = User.objects.get(username='test')", "remove %s' % prefix) class MockFs(): def __init__(self, logical_name=None): self.fs_defaultfs", "cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jtaddress',", "logging.getLogger(__name__) @attr('requires_hadoop') def test_copy_files(): cluster = pseudo_hdfs4.shared_cluster() try: c =", "assert_not_equal from hadoop import cluster, pseudo_hdfs4 from hadoop.conf import HDFS_CLUSTERS,", "'nameNode': 'fsname' }, submission.properties) def test_update_properties(self): finish = [] finish.append(MR_CLUSTERS.set_for_testing({'default':", "= [ MockNode(jar_1), MockNode(jar_2), MockNode(jar_3), MockNode(jar_4), ] def get_application_filename(self): return", "# regarding copyright ownership. Cloudera, Inc. 
licenses this file #", "def test_get_logical_properties(self): submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties) submission._update_properties('curacao:8032',", "<argument>OUTPUT=${output}</argument> <configuration> <property> <name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration> </pig> <ok to=\"end\"/>", "cluster.fs.create(jar_3) cluster.fs.create(jar_4) class MockNode(): def __init__(self, jar_path): self.jar_path = jar_path", "= { 'user.name': 'hue', 'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020', 'jobTracker': 'jtaddress'", "</configuration> </pig> <ok to=\"end\"/> <error to=\"kill\"/> </action> <kill name=\"kill\"> <message>Action", "finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties = { 'user.name': 'hue', 'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode':", "cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4) class MockNode(): def __init__(self, jar_path): self.jar_path", "assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir = deployment_dir + '/lib' external_deployment_dir = external_deployment_dir", "may obtain a copy of the License at # #", "cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId']) finally: try: cluster.fs.rmtree(prefix) except: LOG.exception('failed to remove", "cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir +", "'jtname', 'nameNode': 'fsname' }, submission.properties) def test_update_properties(self): finish = []", "= properties.copy() final_properties.update({ 'jobTracker': 'jtaddress', 'nameNode': 
fs.fs_defaultfs }) submission =", "submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1]", "# limitations under the License. import logging from django.contrib.auth.models import", "deployment_dir # Never move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4) class", "name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/> <action name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare>", "final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jobtracker', 'nameNode': 'namenode' }) submission", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "'%s/workspace' % prefix external_deployment_dir = '%s/deployment' % prefix jar_1 =", "'%s/lib/udf2.jar' % prefix jar_3 = '%s/udf3.jar' % deployment_dir jar_4 =", "fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker'))", "<argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration> <property> <name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property> </configuration> </pig>", "XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'],", "WITHOUT WARRANTIES OR 
CONDITIONS OF ANY KIND, either express or", "various locations deployment_dir = '%s/workspace' % prefix external_deployment_dir = '%s/deployment'", "<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end name=\"end\"/> </workflow-app> \"\"\" properties", "Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker': 'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig',", "properties = { 'user.name': 'hue', 'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020', 'jobTracker':", "user = User.objects.get(username='test') prefix = '/tmp/test_copy_files' if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) #", "jar_path class MockJob(): def __init__(self): self.node_list = [ MockNode(jar_1), MockNode(jar_2),", "import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS from liboozie.submittion import Submission from oozie.tests", "for additional information # regarding copyright ownership. Cloudera, Inc. 
licenses", "clear_sys_caches from desktop.lib.django_test_util import make_logged_in_client LOG = logging.getLogger(__name__) @attr('requires_hadoop') def", "queueName=default examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters = Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath':", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "'deployment-directory') assert_equal(final_properties, submission.properties) clear_sys_caches() fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1]", "django.contrib.auth.models import User from nose.plugins.attrib import attr from nose.tools import", "= '%s/udf1.jar' % prefix jar_2 = '%s/lib/udf2.jar' % prefix jar_3", "'hdfs://curacao:8020' self.logical_name = logical_name if logical_name else '' class MockJt():", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "assert_equal(final_properties, submission.properties) finally: clear_sys_caches() for reset in finish: reset() def", "Inc. under one # or more contributor license agreements. 
See", "properties.copy() final_properties.update({ 'jobTracker': 'jtaddress', 'nameNode': fs.fs_defaultfs }) submission = Submission(None,", "from desktop.lib.django_test_util import make_logged_in_client LOG = logging.getLogger(__name__) @attr('requires_hadoop') def test_copy_files():", "submission.properties) def test_update_properties(self): finish = [] finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default':", "for reset in finish: reset() def test_get_external_parameters(self): xml = \"\"\"", "from django.contrib.auth.models import User from nose.plugins.attrib import attr from nose.tools", "try: cluster.fs.rmtree(prefix) except: LOG.exception('failed to remove %s' % prefix) class", "in compliance # with the License. You may obtain a", "# to you under the Apache License, Version 2.0 (the", "assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement) stats_udf1 =", "'curacao:8032', 'nameNode': 'hdfs://curacao:8020' }, submission.properties) def test_get_logical_properties(self): submission = Submission(self.user,", "= Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker':", "<kill name=\"kill\"> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end name=\"end\"/> </workflow-app>", "import make_logged_in_client LOG = logging.getLogger(__name__) @attr('requires_hadoop') def test_copy_files(): cluster =", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "list_dir_deployement) stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar') stats_udf2 = cluster.fs.stats(deployment_dir +", 
"'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020' }, submission.properties) def test_get_logical_properties(self): submission =", "assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId']) finally: try:", "'/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement)", "submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finally: clear_sys_caches() for reset in finish:", "assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir +", "__init__(self, logical_name=None): self.fs_defaultfs = 'hdfs://curacao:8020' self.logical_name = logical_name if logical_name", "submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs =", "this file # to you under the Apache License, Version", "get_application_filename(self): return 'workflow.xml' submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt) submission._copy_files(deployment_dir,", "prefix = '/tmp/test_copy_files' if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) # Jars in various", "return 'workflow.xml' submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt) submission._copy_files(deployment_dir, \"<xml>My", "= '%s/lib/udf4.jar' % deployment_dir # Never move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) 
cluster.fs.create(jar_2)", "= \"\"\" # # Licensed to the Hue # nameNode=hdfs://localhost:8020", "language governing permissions and # limitations under the License. import", "= properties.copy() submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs()) assert_equal(properties, submission.properties)", "nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default examplesRoot=examples oozie.use.system.libpath=true oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters = Submission(self.user)._get_external_parameters(xml,", "'jobTracker': 'jtaddress', 'nameNode': fs.fs_defaultfs }) submission = Submission(None, properties=properties, oozie_id='test',", "'/udf3.jar') stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar') submission._copy_files('%s/workspace' % prefix, \"<xml>My", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "assert_true(cluster.fs.exists(jar_4)) deployment_dir = deployment_dir + '/lib' external_deployment_dir = external_deployment_dir +", "finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties = { 'user.name': 'hue', 'test.1':", "\"\"\" <workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/> <action name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker>", "submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) clear_sys_caches() fs = cluster.get_hdfs() jt", "def __init__(self, logical_name=None): self.fs_defaultfs = 'hdfs://curacao:8020' self.logical_name = logical_name if", "or implied. 
# See the License for the specific language", "finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties = {", "self.logical_name = logical_name if logical_name else '' class MockJt(): def", "information # regarding copyright ownership. Cloudera, Inc. licenses this file", "attr from nose.tools import assert_equal, assert_true, assert_not_equal from hadoop import", "try: c = make_logged_in_client() user = User.objects.get(username='test') prefix = '/tmp/test_copy_files'", "\"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId'])", "'nameNode': 'hdfs://curacao:8020' }, submission.properties) def test_get_logical_properties(self): submission = Submission(self.user, fs=MockFs(logical_name='fsname'),", "# \"License\"); you may not use this file except in", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "% prefix) class MockFs(): def __init__(self, logical_name=None): self.fs_defaultfs = 'hdfs://curacao:8020'", "}, submission.properties) def test_update_properties(self): finish = [] finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))", "{}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties = { 'user.name': 'hue', 'test.1': 'http://localhost/test?test1=test&test2=test',", "final_properties.update({ 'jobTracker': 'jobtracker', 'nameNode': 'namenode' }) submission = Submission(None, properties=properties,", "class MockNode(): def __init__(self, jar_path): self.jar_path = jar_path class MockJob():", "\"License\"); you may not use this file except in compliance", "python # Licensed to Cloudera, Inc. under one # or", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "# distributed with this work for additional information # regarding", "= '%s/workspace' % prefix external_deployment_dir = '%s/deployment' % prefix jar_1", "</action> <kill name=\"kill\"> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end name=\"end\"/>", "% prefix jar_2 = '%s/lib/udf2.jar' % prefix jar_3 = '%s/udf3.jar'", "external_deployment_dir = external_deployment_dir + '/lib' list_dir_workspace = cluster.fs.listdir(deployment_dir) list_dir_deployement =", "else '' class MockJt(): def __init__(self, logical_name=None): self.logical_name = logical_name", "c = make_logged_in_client() user = User.objects.get(username='test') prefix = '/tmp/test_copy_files' if", "external_deployment_dir = '%s/deployment' % prefix jar_1 = '%s/udf1.jar' % prefix", "'/udf4.jar'), list_dir_deployement) stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar') stats_udf2 = cluster.fs.stats(deployment_dir", "= 
cluster.fs.stats(deployment_dir + '/udf1.jar') stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar') stats_udf3", "prefix jar_2 = '%s/lib/udf2.jar' % prefix jar_3 = '%s/udf3.jar' %", "{ 'user.name': 'hue', 'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020', 'jobTracker': 'jtaddress' }", "jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finally: clear_sys_caches() for", "properties=properties, oozie_id='test', fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties)", "Inc. licenses this file # to you under the Apache", "stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar') stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar')", "'jobtracker', 'nameNode': 'namenode' }) submission = Submission(None, properties=properties, oozie_id='test', fs=fs,", "cluster.fs.stats(deployment_dir + '/udf4.jar') submission._copy_files('%s/workspace' % prefix, \"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir", "= '%s/lib/udf2.jar' % prefix jar_3 = '%s/udf3.jar' % deployment_dir jar_4", "assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId']) finally: try: cluster.fs.rmtree(prefix) except: LOG.exception('failed to", "def test_get_external_parameters(self): xml = \"\"\" <workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/>", "'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020', 'jobTracker': 'jtaddress' } final_properties = properties.copy() submission", "finally: clear_sys_caches() for reset in finish: reset() def test_get_external_parameters(self): xml", "= Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt) 
submission._copy_files(deployment_dir, \"<xml>My XML</xml>\") submission._copy_files(external_deployment_dir, \"<xml>My", "# # Unless required by applicable law or agreed to", "Version 2.0 (the # \"License\"); you may not use this", "<script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration> <property> <name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value> </property>", "cluster.fs.stats(deployment_dir + '/udf2.jar') stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar') stats_udf4 =", "one # or more contributor license agreements. See the NOTICE", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "properties) assert_equal({'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker': 'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot':", "import Submission from oozie.tests import OozieMockBase from desktop.lib.test_utils import clear_sys_caches", "MockNode(jar_1), MockNode(jar_2), MockNode(jar_3), MockNode(jar_4), ] def get_application_filename(self): return 'workflow.xml' submission", "submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs = cluster.get_hdfs()", "jar_1 = '%s/udf1.jar' % prefix jar_2 = '%s/lib/udf2.jar' % prefix", "def __init__(self, logical_name=None): self.logical_name = logical_name if logical_name else ''", "<delete path=\"${output}\"/> </prepare> <script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration> 
<property>", "assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir", "except in compliance # with the License. You may obtain", "MockNode(jar_3), MockNode(jar_4), ] def get_application_filename(self): return 'workflow.xml' submission = Submission(user,", "list_dir_deployement = cluster.fs.listdir(external_deployment_dir) # All destinations there assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'),", "cluster.fs.rmtree(prefix) # Jars in various locations deployment_dir = '%s/workspace' %", "= Submission(None, properties=properties, oozie_id='test', fs=MockFs()) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties,", "implied. # See the License for the specific language governing", "NOTICE file # distributed with this work for additional information", "'/deployment_dir') assert_equal({ 'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020' }, submission.properties) def test_get_logical_properties(self):", "test_get_external_parameters(self): xml = \"\"\" <workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/> <action", "'/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement) stats_udf1 = cluster.fs.stats(deployment_dir +", "this file except in compliance # with the License. 
You", "self.fs_defaultfs = 'hdfs://curacao:8020' self.logical_name = logical_name if logical_name else ''", "'/udf1.jar') stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar') stats_udf3 = cluster.fs.stats(deployment_dir +", "User.objects.get(username='test') prefix = '/tmp/test_copy_files' if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) # Jars in", "'/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement)", "license agreements. See the NOTICE file # distributed with this", "by applicable law or agreed to in writing, software #", "assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'jtname', 'nameNode': 'fsname' },", "assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs", "% deployment_dir # Never move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4)", "'jobTracker': 'jtaddress' } final_properties = properties.copy() submission = Submission(None, properties=properties,", "{}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties = { 'user.name':", "deployment_dir = deployment_dir + '/lib' external_deployment_dir = external_deployment_dir + '/lib'", "list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement) stats_udf1 = 
cluster.fs.stats(deployment_dir + '/udf1.jar')", "to remove %s' % prefix) class MockFs(): def __init__(self, logical_name=None):", "there assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir", "#!/usr/bin/env python # Licensed to Cloudera, Inc. under one #", "submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'curacao:8032', 'nameNode': 'hdfs://curacao:8020' }, submission.properties)", "'true', 'input': '', 'jobTracker': 'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples', 'output':", "try: properties = { 'user.name': 'hue', 'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020',", "assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')) finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker')) clear_sys_caches() fs = cluster.get_hdfs() jt =", "fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname')) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'jtname', 'nameNode':", "+ '/udf2.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'),", "stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar') stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar')", "= cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jtaddress', 'nameNode': fs.fs_defaultfs", "'/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace) 
assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace)", "submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({ 'jobTracker': 'jtname', 'nameNode': 'fsname' }, submission.properties)", "= jar_path class MockJob(): def __init__(self): self.node_list = [ MockNode(jar_1),", "import User from nose.plugins.attrib import attr from nose.tools import assert_equal,", "# All destinations there assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir +", "logical_name=None): self.logical_name = logical_name if logical_name else '' class TestSubmission(OozieMockBase):", "assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir +", "not use this file except in compliance # with the", "jar_3 = '%s/udf3.jar' % deployment_dir jar_4 = '%s/lib/udf4.jar' % deployment_dir", "'/udf2.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace)", "except: LOG.exception('failed to remove %s' % prefix) class MockFs(): def", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "self.logical_name = logical_name if logical_name else '' class TestSubmission(OozieMockBase): def", "Unless required by applicable law or agreed to in writing,", "Cloudera, Inc. 
licenses this file # to you under the", "'user.name': 'hue', 'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020', 'jobTracker': 'jtaddress' } final_properties", "</prepare> <script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration> <property> <name>mapred.input.format.class</name> <value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>", "MR_CLUSTERS, YARN_CLUSTERS from liboozie.submittion import Submission from oozie.tests import OozieMockBase", "'nameNode': 'namenode' }) submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt)", "pseudo_hdfs4 from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS from liboozie.submittion import", "the specific language governing permissions and # limitations under the", "cluster, pseudo_hdfs4 from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS from liboozie.submittion", "# Jars in various locations deployment_dir = '%s/workspace' % prefix", "# or more contributor license agreements. 
See the NOTICE file", "sources still there assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir = deployment_dir", "</pig> <ok to=\"end\"/> <error to=\"kill\"/> </action> <kill name=\"kill\"> <message>Action failed,", "applicable law or agreed to in writing, software # distributed", "'fsname' }, submission.properties) def test_update_properties(self): finish = [] finish.append(MR_CLUSTERS.set_for_testing({'default': {}}))", "+ '/udf4.jar')['fileId']) finally: try: cluster.fs.rmtree(prefix) except: LOG.exception('failed to remove %s'", "from nose.tools import assert_equal, assert_true, assert_not_equal from hadoop import cluster,", "job=MockJob(), fs=cluster.fs, jt=cluster.jt) submission._copy_files(deployment_dir, \"<xml>My XML</xml>\") submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\") #", "finally: try: cluster.fs.rmtree(prefix) except: LOG.exception('failed to remove %s' % prefix)", "to=\"end\"/> <error to=\"kill\"/> </action> <kill name=\"kill\"> <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>", "in writing, software # distributed under the License is distributed", "submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) clear_sys_caches() fs = cluster.get_hdfs() jt =", "xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/> <action name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare> <delete", "permissions and # limitations under the License. 
import logging from", "assert_equal, assert_true, assert_not_equal from hadoop import cluster, pseudo_hdfs4 from hadoop.conf", "<workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/> <action name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node>", "__init__(self, jar_path): self.jar_path = jar_path class MockJob(): def __init__(self): self.node_list", "(the # \"License\"); you may not use this file except", "<prepare> <delete path=\"${output}\"/> </prepare> <script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration>", "hadoop import cluster, pseudo_hdfs4 from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS", "% deployment_dir jar_4 = '%s/lib/udf4.jar' % deployment_dir # Never move", "to=\"Pig\"/> <action name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare> <delete path=\"${output}\"/> </prepare>", "HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS from liboozie.submittion import Submission from oozie.tests import", "cluster.fs.listdir(external_deployment_dir) # All destinations there assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir", "class MockJt(): def __init__(self, logical_name=None): self.logical_name = logical_name if logical_name", "def get_application_filename(self): return 'workflow.xml' submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt)", "from nose.plugins.attrib import attr from nose.tools import assert_equal, assert_true, assert_not_equal", "cluster.fs.create(jar_4) class MockNode(): def __init__(self, jar_path): self.jar_path = jar_path class", "'nameNode': fs.fs_defaultfs }) submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt)", "there assert_true(cluster.fs.exists(jar_1)) 
assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir = deployment_dir + '/lib'", "<name-node>${nameNode}</name-node> <prepare> <delete path=\"${output}\"/> </prepare> <script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument>", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "+ '/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'),", "'/deployment_dir') assert_equal({ 'jobTracker': 'jtname', 'nameNode': 'fsname' }, submission.properties) def test_update_properties(self):", "assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir = deployment_dir + '/lib' external_deployment_dir =", "import clear_sys_caches from desktop.lib.django_test_util import make_logged_in_client LOG = logging.getLogger(__name__) @attr('requires_hadoop')", "'%s/udf3.jar' % deployment_dir jar_4 = '%s/lib/udf4.jar' % deployment_dir # Never", "in various locations deployment_dir = '%s/workspace' % prefix external_deployment_dir =", "class MockFs(): def __init__(self, logical_name=None): self.fs_defaultfs = 'hdfs://curacao:8020' self.logical_name =", "<end name=\"end\"/> </workflow-app> \"\"\" properties = \"\"\" # # Licensed", "hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS from liboozie.submittion import Submission from", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "path=\"${output}\"/> </prepare> <script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument> <argument>-param</argument> <argument>OUTPUT=${output}</argument> <configuration> <property> <name>mapred.input.format.class</name>", "with the License. 
You may obtain a copy of the", "failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end name=\"end\"/> </workflow-app> \"\"\" properties =", "# All sources still there assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir", "finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties = { 'user.name': 'hue',", "list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir", "assert_true, assert_not_equal from hadoop import cluster, pseudo_hdfs4 from hadoop.conf import", "+ '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId'])", "still there assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir = deployment_dir +", "'hdfs://curacao:8020', 'jobTracker': 'jtaddress' } final_properties = properties.copy() submission = Submission(None,", "MockNode(jar_4), ] def get_application_filename(self): return 'workflow.xml' submission = Submission(user, job=MockJob(),", "the License for the specific language governing permissions and #", "assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir", "self.node_list = [ 
MockNode(jar_1), MockNode(jar_2), MockNode(jar_3), MockNode(jar_4), ] def get_application_filename(self):", "cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4) class MockNode(): def __init__(self, jar_path): self.jar_path =", "'' class TestSubmission(OozieMockBase): def test_get_properties(self): submission = Submission(self.user, fs=MockFs()) assert_equal({},", "file # to you under the Apache License, Version 2.0", "either express or implied. # See the License for the", "}) submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt) assert_equal(properties, submission.properties)", "move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4) class MockNode(): def __init__(self,", "# with the License. You may obtain a copy of", "= \"\"\" <workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/> <action name=\"Pig\"> <pig>", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "import logging from django.contrib.auth.models import User from nose.plugins.attrib import attr", "fs=MockFs()) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) clear_sys_caches() fs =", "'test.1': 'http://localhost/test?test1=test&test2=test', 'nameNode': 'hdfs://curacao:8020', 'jobTracker': 'jtaddress' } final_properties = properties.copy()", "fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finally: clear_sys_caches()", "+ '/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'),", "def __init__(self): self.node_list = [ 
MockNode(jar_1), MockNode(jar_2), MockNode(jar_3), MockNode(jar_4), ]", "'/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement)", "prefix, \"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir +", "if logical_name else '' class MockJt(): def __init__(self, logical_name=None): self.logical_name", "'jobTracker': 'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples', 'output': '', 'nameNode': 'hdfs://localhost:8020',", "copyright ownership. Cloudera, Inc. licenses this file # to you", "under one # or more contributor license agreements. See the", "def test_get_properties(self): submission = Submission(self.user, fs=MockFs()) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir')", "assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3)) assert_true(cluster.fs.exists(jar_4)) deployment_dir = deployment_dir + '/lib' external_deployment_dir", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "Cloudera, Inc. under one # or more contributor license agreements.", "message[${wf:errorMessage(wf:lastErrorNode())}]</message> </kill> <end name=\"end\"/> </workflow-app> \"\"\" properties = \"\"\" #", "'input': '', 'jobTracker': 'localhost:8021', 'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig', 'examplesRoot': 'examples', 'output': '',", "__init__(self): self.node_list = [ MockNode(jar_1), MockNode(jar_2), MockNode(jar_3), MockNode(jar_4), ] def", "to Cloudera, Inc. 
under one # or more contributor license", "the Apache License, Version 2.0 (the # \"License\"); you may", "'%s/lib/udf4.jar' % deployment_dir # Never move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3)", "All destinations there assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'),", "reset() def test_get_external_parameters(self): xml = \"\"\" <workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start", "cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4) class MockNode(): def __init__(self, jar_path):", "prefix external_deployment_dir = '%s/deployment' % prefix jar_1 = '%s/udf1.jar' %", "\"<xml>My XML</xml>\") # All sources still there assert_true(cluster.fs.exists(jar_1)) assert_true(cluster.fs.exists(jar_2)) assert_true(cluster.fs.exists(jar_3))", "oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig \"\"\" parameters = Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath': 'true', 'input': '',", "= Submission(self.user)._get_external_parameters(xml, properties) assert_equal({'oozie.use.system.libpath': 'true', 'input': '', 'jobTracker': 'localhost:8021', 'oozie.wf.application.path':", "oozie_id='test', fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress', 'deployment-directory') assert_equal(final_properties, submission.properties) finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode'))", "you under the Apache License, Version 2.0 (the # \"License\");", "'/tmp/test_copy_files' if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) # Jars in various locations deployment_dir", "list_dir_workspace = cluster.fs.listdir(deployment_dir) list_dir_deployement = 
cluster.fs.listdir(external_deployment_dir) # All destinations there", "assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement) stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar') stats_udf2", "'/udf2.jar')['fileId']) assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId']) assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId']) finally:", "submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt) assert_equal(properties, submission.properties) submission._update_properties('jtaddress',", "jt=cluster.jt) submission._copy_files(deployment_dir, \"<xml>My XML</xml>\") submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\") # All sources", "# Licensed to the Hue # nameNode=hdfs://localhost:8020 jobTracker=localhost:8021 queueName=default examplesRoot=examples", "jt = cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jobtracker', 'nameNode':", "xml = \"\"\" <workflow-app name=\"Pig\" xmlns=\"uri:oozie:workflow:0.4\"> <start to=\"Pig\"/> <action name=\"Pig\">", "agreements. 
See the NOTICE file # distributed with this work", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir +", "list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace) assert_true(cluster.fs.exists(external_deployment_dir", "licenses this file # to you under the Apache License,", "deployment_dir + '/lib' external_deployment_dir = external_deployment_dir + '/lib' list_dir_workspace =", "# distributed under the License is distributed on an \"AS", "submission.properties) clear_sys_caches() fs = cluster.get_hdfs() jt = cluster.get_next_ha_mrcluster()[1] final_properties =", "clear_sys_caches() for reset in finish: reset() def test_get_external_parameters(self): xml =", "work for additional information # regarding copyright ownership. 
Cloudera, Inc.", "# Unless required by applicable law or agreed to in", "= User.objects.get(username='test') prefix = '/tmp/test_copy_files' if cluster.fs.exists(prefix): cluster.fs.rmtree(prefix) # Jars", "+ '/udf3.jar'), list_dir_deployement) assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement) stats_udf1 = cluster.fs.stats(deployment_dir", "%s' % prefix) class MockFs(): def __init__(self, logical_name=None): self.fs_defaultfs =", "submission._copy_files(deployment_dir, \"<xml>My XML</xml>\") submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\") # All sources still", "else '' class TestSubmission(OozieMockBase): def test_get_properties(self): submission = Submission(self.user, fs=MockFs())", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "test_get_properties(self): submission = Submission(self.user, fs=MockFs()) assert_equal({}, submission.properties) submission._update_properties('curacao:8032', '/deployment_dir') assert_equal({", "jt = cluster.get_next_ha_mrcluster()[1] final_properties = properties.copy() final_properties.update({ 'jobTracker': 'jtaddress', 'nameNode':", "</property> </configuration> </pig> <ok to=\"end\"/> <error to=\"kill\"/> </action> <kill name=\"kill\">", "License. You may obtain a copy of the License at", "= 'hdfs://curacao:8020' self.logical_name = logical_name if logical_name else '' class", "] def get_application_filename(self): return 'workflow.xml' submission = Submission(user, job=MockJob(), fs=cluster.fs,", "You may obtain a copy of the License at #", "<start to=\"Pig\"/> <action name=\"Pig\"> <pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare> <delete path=\"${output}\"/>", "def test_copy_files(): cluster = pseudo_hdfs4.shared_cluster() try: c = make_logged_in_client() user", "= '%s/udf3.jar' % deployment_dir jar_4 = '%s/lib/udf4.jar' % deployment_dir #", "limitations under the License. 
import logging from django.contrib.auth.models import User", "submission._copy_files('%s/workspace' % prefix, \"<xml>My XML</xml>\") assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId']) assert_not_equal(stats_udf2['fileId'],", "= pseudo_hdfs4.shared_cluster() try: c = make_logged_in_client() user = User.objects.get(username='test') prefix", "compliance # with the License. You may obtain a copy", "= [] finish.append(MR_CLUSTERS.set_for_testing({'default': {}})) finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) finish.append(YARN_CLUSTERS.set_for_testing({'default': {}})) finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True)) try: properties", "<pig> <job-tracker>${jobTracker}</job-tracker> <name-node>${nameNode}</name-node> <prepare> <delete path=\"${output}\"/> </prepare> <script>aggregate.pig</script> <argument>-param</argument> <argument>INPUT=${input}</argument>", "+ '/udf3.jar') stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar') submission._copy_files('%s/workspace' % prefix,", "\"<xml>My XML</xml>\") submission._copy_files(external_deployment_dir, \"<xml>My XML</xml>\") # All sources still there", "= deployment_dir + '/lib' external_deployment_dir = external_deployment_dir + '/lib' list_dir_workspace", "= properties.copy() final_properties.update({ 'jobTracker': 'jobtracker', 'nameNode': 'namenode' }) submission =", "destinations there assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace) assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace)", "jar_path): self.jar_path = jar_path class MockJob(): def __init__(self): self.node_list =", "cluster.fs.stats(deployment_dir + '/udf3.jar') stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar') submission._copy_files('%s/workspace' %", "# Never move cluster.fs.mkdir(prefix) cluster.fs.create(jar_1) cluster.fs.create(jar_2) cluster.fs.create(jar_3) cluster.fs.create(jar_4) 
class MockNode():" ]
[ "# prev:[None, 3, 42, 42] -> [None, 3, 43, 43]", "opt.label_nc, opt.nzx, opt.nzy], 0, device=opt.device) in_s_S = prev_S # full", "schedulerS.step() functions.save_networks(netG, netD, netS, opt) ## save netG, netD, z_opt,", "__init__(self, opt): self.criterionGAN = GANLoss(not opt.no_lsgan) self.criterionFeat = nn.L1Loss() if", "D_x = loss_D_real.item() # train with fake if (j ==", "G [1, 3, 33, 33] count += 1 elif mode", "= reals[opt.scale_num] # find the current level image xn opt.nzx", "functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step() schedulerG.step() schedulerS.step() functions.save_networks(netG, netD, netS, opt) ##", "alpha = opt.alpha # setup optimizer optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d,", "as plt from Models.pix2pixHD_base import GANLoss, VGGLoss from Models.pix2pixHD2 import", "[None, 1, 32, 32] noise_ = noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy)", "epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc))", "epoch, opt.niter)) if epoch % 25 == 0 or epoch", "(epoch == 0): # first iteration training in this level", "opt.nzy], 0, device=opt.device) in_s = prev # full of 0", "opt.nzx, opt.nzy) ## Noise_: for generated false samples through generator", "noise = noise_ ## Gausiaan noise for generating image [None,", "[1, 3, 26, 26] :param mode: 'rand' :param opt: :return:", "training in this level if Gs == [] and Ss", "= torch.sqrt(criterion(onehot_label, prev_S)) # RMSE_S = 0 opt.noise_amp_S = opt.noise_amp_init", "* 0.8], gamma=opt.gamma) schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8], gamma=opt.gamma)", "= 0, second step prev = a image generated by", "/ opt.num_D for i in range(opt.num_D): for j in range(len(pred_fake[i])", "vmax=1) 
plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' %", "1-G_z), 1) G_z = imresize(G_z, real_next[1] / real_curr[1], opt) G_z", "opt.noise_amp = opt.noise_amp_init opt.noise_amp_S = opt.noise_amp_init else: prev = draw_concat(Gs,", "z_in = noise_amp * z + G_z G_z = G(z_in.detach(),", "## TODO: you can just pass image shape here :param", "rec_loss = 0 errG = loss_G_GAN + loss_G_GAN_Feat + loss_G_VGG", "matching loss loss_G_VGG = 0 if not opt.no_vgg_loss: loss_G_VGG =", "[1] :param in_s: all zeros [1, 3, 26, 26] :param", "> 0: if mode == 'generator': count = 0 for", "image shape here :param Gs: list of prev netG :param", "D(S(z)) ########################### netS.zero_grad() pred_fake_S = netD(data['image'], segment_prob) loss_G_GAN_S = 0.03", "data['label'][:,0:1,...]) loss_G_GAN = 0.5 * loss.criterionGAN(pred_fake, True) # GAN feature", "0 [None, 4, 32, 32] mask = data['label'][:,0:1,...] 
opt.noise_amp =", "= loss_G_GAN + loss_G_GAN_Feat + loss_G_VGG + rec_loss errG.backward() optimizerG.step()", "G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3,", "0.999)) schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerG =", "opt.nzy) ## Noise_: for generated false samples through generator else:", "of 0 [None, 3, 32, 32] prev_S = torch.full([opt.batchSize, opt.label_nc,", "(3) Update S network: maximize D(S(z)) ########################### netS.zero_grad() pred_fake_S =", "torch.utils.data from Training import functions from Training.imresize import imresize import", "import torch.nn as nn import torch.optim as optim import torch.utils.data", "* feat_weights * \\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) * opt.lambda_feat errS =", "D_S_z) ## discriminator loss on fake if epoch % 25", "noise_ = noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy) ## Noise_: for generated", "reals[opt.scale_num] # find the current level image xn opt.nzx =", "# train with real netD.zero_grad() pred_real = netD(data['image'], data['label'][:,0:1,...]) loss_D_real", "(opt.scale_num, epoch, opt.niter)) if epoch % 25 == 0 or", ":param mode: 'rand' :param opt: :return: ''' G_z = in_s[:opt.batchSize,", "mode == 'generator': count = 0 for G, mask, real_curr,", "self.crossEntropy = nn.CrossEntropyLoss() if not opt.no_vgg_loss: self.criterionVGG = VGGLoss() def", "D network: maximize D(x) + D(G(z)) ########################### # train with", "new noise is equal to the prev generated image plus", "represent the image shape :param NoiseAmp: [1] :param in_s: all", "schedulerD.step() schedulerG.step() schedulerS.step() functions.save_networks(netG, netD, netS, opt) ## save netG,", "from Models.pix2pixHD2 import mask2onehot class Losses(): def __init__(self, opt): self.criterionGAN", "= prev # full of 0 [None, 3, 32, 32]", "32] the same size with the input image # detach()", ":param 
NoiseAmp: [] -> [1] :param opt: config :return: '''", "opt.nzx, opt.nzy], opt.batchSize) # [None, 1, 32, 32] noise_ =", "[None, 3, 42, 42] else: noise = opt.noise_amp * noise_", "alpha != 0: ## alpha = 10 calculate the reconstruction", ":, :, :] # [None, 3, 26, 26] all zeros,", "functions.convert_image_np(data['image']), vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc))", "netD(fake.detach(), data['label'][:,0:1,...]) # output shape [1, 1, 16, 16] ->", "opt.alpha # setup optimizer optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999))", "Ss == []: prev = torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy], 0,", "segment_prob, segment_mask = netS(data['image'], prev_S.detach()) pred_fake_S = netD(data['image'], segment_prob.detach()) loss_D_fake_S", "pyramid] only used to represent the image shape :param NoiseAmp:", "errD = (loss_D_real + 0.5 * loss_D_fake + 0.5 *", "dim =1)) # GAN feature matching loss loss_G_GAN_Feat_S = 0", "prev = torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy], 0, device=opt.device) in_s =", "3, 42, 42] else: noise = opt.noise_amp * noise_ +", "_mask] :param reals: [image pyramid] only used to represent the", "niter = 2000 if Gs == [] and Ss ==", "as optim import torch.utils.data from Training import functions from Training.imresize", "= z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3]) else: z = functions.generate_noise( [opt.nc_z,", "= 0 ## dummy z_opt alpha = opt.alpha # setup", "opt.contour: G_z = torch.cat((G_z, 1-G_z), 1) G_z = imresize(G_z, real_next[1]", ":param Ss: list of prev netS :param in_s: 0-> all", "RMSE prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt)", "''' :param Gs: [G0] :param mask: [down scaled _mask] :param", "to parser output path return in_s, in_s_S, netG, netS def", "of prev netG :param Ss: list of prev netS :param", "of 
prev netS :param in_s: 0-> all zero [1, 3,", "= loss.crossEntropy(segment_logit, data['label'].float()) else: loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim =1))", "## discriminator loss on real D_fake2plot.append(D_G_z + D_S_z) ## discriminator", "G [1, 3, 33, 33] count += 1 return G_z", "loss_D_real = loss.criterionGAN(pred_real, True) D_x = loss_D_real.item() # train with", "generator else: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) for j,", "else: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) for j, data", "mask = data['label'][:,0:1,...] if Gs == []: noise = noise_", "data['image'].to(opt.device) data['label'] = data['label'].long().to(opt.device) ############################ # (1) Update D network:", "a image generated by previous Generator with bilinear upsampling [1,", "Recloss(fake, data['image']) else: rec_loss = 0 errG = loss_G_GAN +", "= real[1] # z_opt = 0 ## dummy z_opt alpha", "= netD(data['image'], data['label'][:,0:1,...]) loss_D_real = loss.criterionGAN(pred_real, True) D_x = loss_D_real.item()", "level image xn opt.nzx = real[0] opt.nzy = real[1] #", "mask = data['label'][:,0:1,...] 
opt.noise_amp = opt.noise_amp_init opt.noise_amp_S = opt.noise_amp_init else:", "functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) for j, data in enumerate(dataloader): data['image']", "equal to the prev generated image plus the gaussian noise.", "# (2) Update G network: maximize D(G(z)) ########################### netG.zero_grad() pred_fake", "first step prev = 0, second step prev = a", "+ loss_G_GAN_Feat + loss_G_VGG + rec_loss errG.backward() optimizerG.step() ############################ #", "* opt.lambda_feat ## reconstruction loss if alpha != 0: ##", "second step prev = a image generated by previous Generator", "loss.criterionGAN(pred_fake, False) D_G_z = loss_D_fake.item() # segment_logit, segment_mask = netS(data['image'],", "D_weights = 1.0 / opt.num_D for i in range(opt.num_D): for", "in this level if Gs == [] and Ss ==", "NoiseAmp): if count == 0: z = functions.generate_noise([1, real_curr[0], real_curr[1]],", "current G [1, 3, 33, 33] count += 1 elif", "NoiseAmpS, in_s_S, 'segment', opt) ## prob with [None, 4, 32,", "32, 32] mask = data['label'][:,0:1,...] 
opt.noise_amp = opt.noise_amp_init opt.noise_amp_S =", "schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8], gamma=opt.gamma) errD2plot = []", "in_s = prev # full of 0 [None, 3, 32,", "image generated by previous Generator with bilinear upsampling [1, 3,", "pred_fake_S = netD(data['image'], segment_prob) loss_G_GAN_S = 0.03 * loss.criterionGAN(pred_fake_S, True)", "setup optimizer optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999)) optimizerG =", "# print(len(pred_fake), len(pred_fake[0])) loss_D_fake = loss.criterionGAN(pred_fake, False) D_G_z = loss_D_fake.item()", "functions.convert_image_np(fake.detach()), vmin=0, vmax=1) plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch), functions.convert_image_np(data['image']), vmin=0, vmax=1)", "else: noise = opt.noise_amp * noise_ + prev ## [None,", "D(G(z)) ########################### netG.zero_grad() pred_fake = netD(fake, data['label'][:,0:1,...]) loss_G_GAN = 0.5", "self.criterionFeat = nn.L1Loss() if opt.contour: self.crossEntropy = nn.BCEWithLogitsLoss() else: self.crossEntropy", "= optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999)) optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1,", "data['label'][:,0:1,...] 
opt.noise_amp = opt.noise_amp_init opt.noise_amp_S = opt.noise_amp_init else: prev =", "= loss.criterionGAN(pred_fake_S, False) D_S_z = loss_D_fake_S.item() errD = (loss_D_real +", "= netS(data['image'], prev_S.detach()) pred_fake_S = netD(data['image'], segment_prob.detach()) loss_D_fake_S = loss.criterionGAN(pred_fake_S,", "each iteration ############################ # (2) Update G network: maximize D(G(z))", "GAN feature matching loss loss_G_GAN_Feat = 0 if not opt.no_ganFeat_loss:", "draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) prev_S = draw_concat(Ss,", "prev = a image generated by previous Generator with bilinaer", "opt.contour: loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float()) else: loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...],", "the noise. # prev:[None, 3, 42, 42] -> [None, 3,", "== []: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) # [None,", "loss_G_GAN_S = 0.03 * loss.criterionGAN(pred_fake_S, True) # Segmentation loss if", "previous Generator with bilinear upsampling [1, 3, 33, 33] criterion", "1) D_weights = 1.0 / opt.num_D for i in range(opt.num_D):", "torch.squeeze(data['label'][:,0:1,...], dim =1)) # GAN feature matching loss loss_G_GAN_Feat_S =", "== (opt.niter - 1): plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch), functions.convert_image_np(fake.detach()), vmin=0,", "don't go to the noise. 
# prev:[None, 3, 42, 42]", "[1, 3, 33, 33] count += 1 elif mode ==", "0 or epoch == (opt.niter - 1): print('scale %d:[%d/%d]' %", "can just pass image shape here :param Gs: list of", "loss_G_GAN_Feat_S + loss_G_Seg errS.backward() optimizerS.step() ## for every epoch, do", "% (opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch),", "False) D_G_z = loss_D_fake.item() # segment_logit, segment_mask = netS(data['image'], mask2onehot(prev_S,", "as nn import torch.optim as optim import torch.utils.data from Training", "= functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) # [None, 1, 32, 32]", "all zero [1, 3, 26, 26] :param NoiseAmp: [] ->", "range(opt.niter): # niter = 2000 if Gs == [] and", "= loss_D_real.item() # train with fake if (j == 0)", "fake if (j == 0) & (epoch == 0): #", "prev ## [None, 3, 43, 43] new noise is equal", "16] -> [1, 1, 23, 23] # print(len(pred_fake), len(pred_fake[0])) loss_D_fake", "if not opt.no_vgg_loss: loss_G_VGG = loss.criterionVGG(fake, data['image']) * opt.lambda_feat ##", "= nn.L1Loss() if opt.contour: self.crossEntropy = nn.BCEWithLogitsLoss() else: self.crossEntropy =", "+ 0.5 * loss_D_fake_S) ## Todo: figure out a proper", "loss.criterionGAN(pred_fake_S, True) # Segmentation loss if opt.contour: loss_G_Seg = loss.crossEntropy(segment_logit,", "matching loss loss_G_GAN_Feat = 0 if not opt.no_ganFeat_loss: feat_weights =", "plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step() schedulerG.step() schedulerS.step() functions.save_networks(netG,", "if len(Gs) > 0: if mode == 'generator': count =", "generator G_z = imresize(G_z, real_next[1] / real_curr[1], opt) G_z =", "alpha = 10 calculate the reconstruction loss Recloss = nn.MSELoss()", "opt): ''' :param netD: currD :param netG: currG :param netS:", "reals, reals[1:], 
NoiseAmp): G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ##", "feat_weights * \\ loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * opt.lambda_feat # VGG feature", "43] new noise is equal to the prev generated image", "= draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) ## prob", "opt.lambda_feat ## reconstruction loss if alpha != 0: ## alpha", "3, 32, 32] the same size with the input image", "generated by previous Generator with bilinaer upsampling pred_fake = netD(fake.detach(),", "_, G_z, _ = G(mask, G_z) ## [1, 3, 26,", "loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) * opt.lambda_feat errS = loss_G_GAN_S + loss_G_GAN_Feat_S +", "self.criterionVGG = VGGLoss() def train_single_scale(dataloader, netD, netG, netS, reals, Gs,", "data['label'].long().to(opt.device) ############################ # (1) Update D network: maximize D(x) +", ":param opt: config :return: ''' loss = Losses(opt) real =", "[1, 3, 33, 33] criterion = nn.MSELoss() RMSE = torch.sqrt(criterion(data['image'],", "Gs == []: noise = noise_ ## Gausiaan noise for", "[]: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) # [None, 1,", "network: maximize D(x) + D(G(z)) ########################### # train with real", "torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy], 0, device=opt.device) in_s_S = prev_S #", ":param mask: [down scaled _mask] :param reals: [image pyramid] only", "for G, mask, real_curr, real_next, noise_amp in zip(Gs, masks, reals,", "26] :param mode: 'rand' :param opt: :return: ''' G_z =", "i in range(opt.num_D): for j in range(len(pred_fake_S[i]) - 1): loss_G_GAN_Feat_S", "% 25 == 0 or epoch == (opt.niter - 1):", "mode == 'segment': count = 0 for G, mask, real_curr,", "netD, netS, opt) ## save netG, netD, z_opt, opt is", "## alpha = 10 calculate the reconstruction loss Recloss =", "gamma=opt.gamma) errD2plot = [] errG2plot = [] D_real2plot = []", "a new noise, prev is a image generated by 
previous", "opt.lambda_feat # VGG feature matching loss loss_G_VGG = 0 if", "errG.backward() optimizerG.step() ############################ # (3) Update S network: maximize D(S(z))", "real_curr[1]], opt.batchSize) G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z", "network: maximize D(S(z)) ########################### netS.zero_grad() pred_fake_S = netD(data['image'], segment_prob) loss_G_GAN_S", "errD.backward() optimizerD.step() errD2plot.append(errD.detach()) ## errD for each iteration ############################ #", "noise for generating image [None, 3, 42, 42] else: noise", "for i in range(opt.num_D): for j in range(len(pred_fake[i]) - 1):", "True) # Segmentation loss if opt.contour: loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float())", "''' :param netD: currD :param netG: currG :param netS: currS", "on real D_fake2plot.append(D_G_z + D_S_z) ## discriminator loss on fake", "########################### # train with real netD.zero_grad() pred_real = netD(data['image'], data['label'][:,0:1,...])", "= draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) ## given", "= 0 if not opt.no_ganFeat_loss: feat_weights = 4.0 / (opt.n_layers_D", ":, :] # [None, 3, 26, 26] all zeros, image", "Models.pix2pixHD_base import GANLoss, VGGLoss from Models.pix2pixHD2 import mask2onehot class Losses():", "range(opt.num_D): for j in range(len(pred_fake[i]) - 1): loss_G_GAN_Feat += D_weights", "with fake if (j == 0) & (epoch == 0):", "z + G_z G_z = G(z_in.detach(), G_z, mask) ## [1,", "with current G [1, 3, 33, 33] count += 1", "in_s, mode, opt): ''' :param Gs: [G0] :param mask: [down", "= nn.CrossEntropyLoss() if not opt.no_vgg_loss: self.criterionVGG = VGGLoss() def train_single_scale(dataloader,", "# find the current level image xn opt.nzx = real[0]", "loss loss_G_VGG = 0 if not opt.no_vgg_loss: loss_G_VGG = loss.criterionVGG(fake,", "list of prev netG :param Ss: list of prev netS", "1, 32, 32] noise_ = 
noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy) ##", "image [None, 3, 42, 42] else: noise = opt.noise_amp *", "3, 26, 26] all zeros, image input for the corest", "noise_ + prev ## [None, 3, 43, 43] new noise", "= in_s[:opt.batchSize, :, :, :] # [None, 3, 26, 26]", "2000 if Gs == [] and Ss == []: noise_", "pred_fake_S = netD(data['image'], segment_prob.detach()) loss_D_fake_S = loss.criterionGAN(pred_fake_S, False) D_S_z =", "= netD(data['image'], segment_prob.detach()) loss_D_fake_S = loss.criterionGAN(pred_fake_S, False) D_S_z = loss_D_fake_S.item()", "train with real netD.zero_grad() pred_real = netD(data['image'], data['label'][:,0:1,...]) loss_D_real =", "= data['label'][:,0:1,...] else: prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s,", ":, 0:real_next[0], 0:real_next[1]] ## resize the image to be compatible", "0:real_next[1]] ## resize the image to be compatible with current", "NoiseAmpS, opt): ''' :param netD: currD :param netG: currG :param", "0, second step prev = a image generated by previous", "data['label'][:,0:1,...]) # output shape [1, 1, 16, 16] -> [1,", "for j in range(len(pred_fake[i]) - 1): loss_G_GAN_Feat += D_weights *", "i in range(opt.num_D): for j in range(len(pred_fake[i]) - 1): loss_G_GAN_Feat", "[None, 3, 32, 32] _, G_z, _ = G(mask, G_z)", "loss_D_fake_S.item() errD = (loss_D_real + 0.5 * loss_D_fake + 0.5", "opt.noise_amp * noise_ + prev ## [None, 3, 43, 43]", "33] criterion = nn.MSELoss() RMSE = torch.sqrt(criterion(data['image'], prev)) opt.noise_amp =", "prev generated image plus the gaussian noise. 
fake = netG(noise.detach(),", "32, 32] onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S = torch.sqrt(criterion(onehot_label, prev_S))", "############################ # (2) Update G network: maximize D(G(z)) ########################### netG.zero_grad()", "## for every epoch, do the following: errG2plot.append(errG.detach()) ## ErrG", "netG, netS def draw_concat(Gs, masks, reals, NoiseAmp, in_s, mode, opt):", "all zeros [1, 3, 26, 26] :param mode: 'rand' :param", "= loss.criterionGAN(pred_real, True) D_x = loss_D_real.item() # train with fake", "loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float()) else: loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim", "the reconstruction loss Recloss = nn.MSELoss() rec_loss = alpha *", "= [] D_real2plot = [] D_fake2plot = [] for epoch", "betas=(opt.beta1, 0.999)) optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999)) schedulerD =", "+= 1 elif mode == 'segment': count = 0 for", "noise = opt.noise_amp * noise_ + prev ## [None, 3,", "go to the noise. # prev:[None, 3, 42, 42] ->", "previous generator G_z = imresize(G_z, real_next[1] / real_curr[1], opt) G_z", "for i in range(opt.num_D): for j in range(len(pred_fake_S[i]) - 1):", "optimizer optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999)) optimizerG = optim.Adam(netG.parameters(),", "= opt.noise_amp_init * RMSE_S mask = data['label'][:,0:1,...] else: prev =", "sure that the gradients don't go to the noise. 
#", "data['image']) * opt.lambda_feat ## reconstruction loss if alpha != 0:", "plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch), functions.convert_image_np(fake.detach()), vmin=0, vmax=1) plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf,", "z = functions.generate_noise([1, real_curr[0], real_curr[1]], opt.batchSize) z = z.expand(opt.batchSize, G_z.shape[1],", "0.8], gamma=opt.gamma) errD2plot = [] errG2plot = [] D_real2plot =", "netS: currS :param reals: a list of image pyramid ##", "(2) Update G network: maximize D(G(z)) ########################### netG.zero_grad() pred_fake =", "bilinaer upsampling pred_fake = netD(fake.detach(), data['label'][:,0:1,...]) # output shape [1,", "0:real_next[0], 0:real_next[1]] ## resize the image to be compatible with", "for each iteration ############################ # (2) Update G network: maximize", "loss on fake if epoch % 25 == 0 or", "epoch in range(opt.niter): # niter = 2000 if Gs ==", "# RMSE_S = 0 opt.noise_amp_S = opt.noise_amp_init * RMSE_S mask", "## G_z [None, 3, 32, 32] z_in = noise_amp *", "loss_D_fake + 0.5 * loss_D_fake_S) ## Todo: figure out a", "zip(Gs, masks, reals, reals[1:], NoiseAmp): if count == 0: z", "generated false samples through generator else: noise_ = functions.generate_noise([1, opt.nzx,", "* Recloss(fake, data['image']) else: rec_loss = 0 errG = loss_G_GAN", "+ G_z G_z = G(z_in.detach(), G_z, mask) ## [1, 3,", "0.8], gamma=opt.gamma) schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8], gamma=opt.gamma) errD2plot", "reals: a list of image pyramid ## TODO: you can", "# z_opt = 0 ## dummy z_opt alpha = opt.alpha", "loss_D_fake = loss.criterionGAN(pred_fake, False) D_G_z = loss_D_fake.item() # segment_logit, segment_mask", "NoiseAmp, in_s, 'generator', opt) ## given a new noise, prev", "with the input image # detach() make sure that the", "count += 1 elif mode == 'segment': count = 0", "G_z) ## [1, 3, 26, 26] output of previous generator", "_ = 
G(mask, G_z) ## [1, 3, 26, 26] output", "3, 26, 26] output of previous generator G_z = imresize(G_z,", "[None, 3, 43, 43] first step prev = 0, second", "G_z.shape[1], z.shape[2], z.shape[3]) else: z = functions.generate_noise( [opt.nc_z, real_curr[0], real_curr[1]],", "D_G_z = loss_D_fake.item() # segment_logit, segment_mask = netS(data['image'], mask2onehot(prev_S, opt.label_nc))", "in_s[:opt.batchSize, :, :, :] # [None, 3, 26, 26] all", "iteration training in this level if Gs == [] and", "Update D network: maximize D(x) + D(G(z)) ########################### # train", "betas=(opt.beta1, 0.999)) schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerG", "0, device=opt.device) in_s_S = prev_S # full of 0 [None,", "pass image shape here :param Gs: list of prev netG", "functions.generate_noise( [opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize) G_z = G_z[:, :, 0:real_curr[0],", "only used to represent the image shape :param NoiseAmp: [1]", "= [] errG2plot = [] D_real2plot = [] D_fake2plot =", "loss.criterionGAN(pred_fake_S, False) D_S_z = loss_D_fake_S.item() errD = (loss_D_real + 0.5", "4.0 / (opt.n_layers_D + 1) D_weights = 1.0 / opt.num_D", "## [1, 3, 26, 26] output of previous generator G_z", "data['label'][:,0:1,...]) loss_D_real = loss.criterionGAN(pred_real, True) D_x = loss_D_real.item() # train", "previous generator if opt.contour: G_z = torch.cat((G_z, 1-G_z), 1) G_z", "else: loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim =1)) # GAN feature", "0 [None, 3, 32, 32] prev_S = torch.full([opt.batchSize, opt.label_nc, opt.nzx,", "+= D_weights * feat_weights * \\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) * opt.lambda_feat", "G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32, 32]", "[1, 3, 26, 26] output of previous generator G_z =", "here :param Gs: list of prev netG :param Ss: list", 
"loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * opt.lambda_feat # VGG feature matching loss loss_G_VGG", "Generator with bilinaer upsampling pred_fake = netD(fake.detach(), data['label'][:,0:1,...]) # output", "errG2plot = [] D_real2plot = [] D_fake2plot = [] for", "(loss_D_real + 0.5 * loss_D_fake + 0.5 * loss_D_fake_S) ##", "device=opt.device) in_s_S = prev_S # full of 0 [None, 4,", "= 10 calculate the reconstruction loss Recloss = nn.MSELoss() rec_loss", "''' loss = Losses(opt) real = reals[opt.scale_num] # find the", "0: ## alpha = 10 calculate the reconstruction loss Recloss", "loss_D_fake_S) ## Todo: figure out a proper coefficient errD.backward() optimizerD.step()", "= opt.noise_amp * noise_ + prev ## [None, 3, 43,", "= netD(data['image'], segment_prob) loss_G_GAN_S = 0.03 * loss.criterionGAN(pred_fake_S, True) #", "if count == 0: z = functions.generate_noise([1, real_curr[0], real_curr[1]], opt.batchSize)", "noise. # prev:[None, 3, 42, 42] -> [None, 3, 43,", "[None, 3, 43, 43] new noise is equal to the", "loss loss_G_GAN_Feat_S = 0 if not opt.no_ganFeat_loss: feat_weights = 4.0", "upsampling pred_fake = netD(fake.detach(), data['label'][:,0:1,...]) # output shape [1, 1,", "GANLoss(not opt.no_lsgan) self.criterionFeat = nn.L1Loss() if opt.contour: self.crossEntropy = nn.BCEWithLogitsLoss()", "return in_s, in_s_S, netG, netS def draw_concat(Gs, masks, reals, NoiseAmp,", "corest level if len(Gs) > 0: if mode == 'generator':", "pyramid ## TODO: you can just pass image shape here", "len(pred_fake[0])) loss_D_fake = loss.criterionGAN(pred_fake, False) D_G_z = loss_D_fake.item() # segment_logit,", "!= 0: ## alpha = 10 calculate the reconstruction loss", "in range(len(pred_fake_S[i]) - 1): loss_G_GAN_Feat_S += D_weights * feat_weights *", "epoch == (opt.niter - 1): plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch), functions.convert_image_np(fake.detach()),", "(opt.outf, epoch), functions.convert_image_np(fake.detach()), vmin=0, vmax=1) 
plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch), functions.convert_image_np(data['image']),", "torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy], 0, device=opt.device) in_s = prev #", "figure out a proper coefficient errD.backward() optimizerD.step() errD2plot.append(errD.detach()) ## errD", "epoch), functions.convert_image_np(fake.detach()), vmin=0, vmax=1) plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch), functions.convert_image_np(data['image']), vmin=0,", "functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step()", "# Segmentation loss if opt.contour: loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float()) else:", "3, 33, 33] criterion = nn.MSELoss() RMSE = torch.sqrt(criterion(data['image'], prev))", "= functions.generate_noise( [opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize) G_z = G_z[:, :,", "G_z G_z = G(z_in.detach(), G_z, mask) ## [1, 3, 26,", "\\ loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * opt.lambda_feat # VGG feature matching loss", "prev:[None, 3, 42, 42] -> [None, 3, 43, 43] first", "43, 43] first step prev = 0, second step prev", "generated image plus the gaussian noise. 
fake = netG(noise.detach(), prev,", "= netD(fake, data['label'][:,0:1,...]) loss_G_GAN = 0.5 * loss.criterionGAN(pred_fake, True) #", "= 0.5 * loss.criterionGAN(pred_fake, True) # GAN feature matching loss", "pred_real[i][j].detach()) * opt.lambda_feat errS = loss_G_GAN_S + loss_G_GAN_Feat_S + loss_G_Seg", "in range(opt.niter): # niter = 2000 if Gs == []", "real D_fake2plot.append(D_G_z + D_S_z) ## discriminator loss on fake if", "errD2plot = [] errG2plot = [] D_real2plot = [] D_fake2plot", "netD(data['image'], data['label'][:,0:1,...]) loss_D_real = loss.criterionGAN(pred_real, True) D_x = loss_D_real.item() #", "in_s: all zeros [1, 3, 26, 26] :param mode: 'rand'", "count == 0: z = functions.generate_noise([1, real_curr[0], real_curr[1]], opt.batchSize) z", "False) D_S_z = loss_D_fake_S.item() errD = (loss_D_real + 0.5 *", "0:real_curr[1]] ## G_z [None, 3, 32, 32] _, G_z, _", "4, 32, 32] mask = data['label'][:,0:1,...] opt.noise_amp = opt.noise_amp_init opt.noise_amp_S", "D(G(z)) ########################### # train with real netD.zero_grad() pred_real = netD(data['image'],", "torch.sqrt(criterion(data['image'], prev)) opt.noise_amp = opt.noise_amp_init * RMSE prev_S = draw_concat(Ss,", "currG :param netS: currS :param reals: a list of image", "# niter = 2000 if Gs == [] and Ss", "data['label'] = data['label'].long().to(opt.device) ############################ # (1) Update D network: maximize", "Gs: list of prev netG :param Ss: list of prev", "-> [1] :param opt: config :return: ''' loss = Losses(opt)", "loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim =1)) # GAN feature matching loss loss_G_GAN_Feat_S", "opt) ## given a new noise, prev is a image", "0 ## dummy z_opt alpha = opt.alpha # setup optimizer", "epoch % 25 == 0 or epoch == (opt.niter -", "[G0] :param mask: [down scaled _mask] :param reals: [image pyramid]", "else: rec_loss = 0 errG = loss_G_GAN + loss_G_GAN_Feat +", "0.5 * loss.criterionGAN(pred_fake, True) # GAN feature matching 
loss loss_G_GAN_Feat", "# detach() make sure that the gradients don't go to", "opt.no_vgg_loss: loss_G_VGG = loss.criterionVGG(fake, data['image']) * opt.lambda_feat ## reconstruction loss", "D_fake2plot.append(D_G_z + D_S_z) ## discriminator loss on fake if epoch", "26, 26] :param NoiseAmp: [] -> [1] :param opt: config", "loss Recloss = nn.MSELoss() rec_loss = alpha * Recloss(fake, data['image'])", "loss_G_Seg errS.backward() optimizerS.step() ## for every epoch, do the following:", "image shape :param NoiseAmp: [1] :param in_s: all zeros [1,", "segment_prob) loss_G_GAN_S = 0.03 * loss.criterionGAN(pred_fake_S, True) # Segmentation loss", "netD.zero_grad() pred_real = netD(data['image'], data['label'][:,0:1,...]) loss_D_real = loss.criterionGAN(pred_real, True) D_x", "segment_mask = netS(data['image'], prev_S.detach()) pred_fake_S = netD(data['image'], segment_prob.detach()) loss_D_fake_S =", "* RMSE_S mask = data['label'][:,0:1,...] else: prev = draw_concat(Gs, data['down_scale_label'],", "import torch.optim as optim import torch.utils.data from Training import functions", "== 0 or epoch == (opt.niter - 1): plt.imsave('%s/fake_sample_%d.png' %", "0 if not opt.no_vgg_loss: loss_G_VGG = loss.criterionVGG(fake, data['image']) * opt.lambda_feat", "26, 26] :param mode: 'rand' :param opt: :return: ''' G_z", "# [None, 3, 26, 26] all zeros, image input for", "opt.noise_amp_init opt.noise_amp_S = opt.noise_amp_init else: prev = draw_concat(Gs, data['down_scale_label'], reals,", "= loss_D_fake.item() # segment_logit, segment_mask = netS(data['image'], mask2onehot(prev_S, opt.label_nc)) #", "########################### netG.zero_grad() pred_fake = netD(fake, data['label'][:,0:1,...]) loss_G_GAN = 0.5 *", "xn opt.nzx = real[0] opt.nzy = real[1] # z_opt =", "j in range(len(pred_fake_S[i]) - 1): loss_G_GAN_Feat_S += D_weights * feat_weights", "prev, mask) # [None, 3, 32, 32] the same size", "## reconstruction loss if alpha != 0: ## alpha =", "= noise_amp * z + G_z G_z = 
G(z_in.detach(), G_z,", "1 elif mode == 'segment': count = 0 for G,", "optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999)) optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g,", "if Gs == [] and Ss == []: noise_ =", "torch.optim as optim import torch.utils.data from Training import functions from", "32] mask = data['label'][:,0:1,...] opt.noise_amp = opt.noise_amp_init opt.noise_amp_S = opt.noise_amp_init", "feat_weights * \\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) * opt.lambda_feat errS = loss_G_GAN_S", "resize the image to be compatible with current G [1,", "# print(epoch, j) segment_logit, segment_prob, segment_mask = netS(data['image'], prev_S.detach()) pred_fake_S", "nn import torch.optim as optim import torch.utils.data from Training import", "the image to be compatible with current G [1, 3,", "= Losses(opt) real = reals[opt.scale_num] # find the current level", "prev_S # full of 0 [None, 4, 32, 32] mask", "in range(opt.num_D): for j in range(len(pred_fake_S[i]) - 1): loss_G_GAN_Feat_S +=", "0-> all zero [1, 3, 26, 26] :param NoiseAmp: []", "* 0.8], gamma=opt.gamma) schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8], gamma=opt.gamma)", "criterion = nn.MSELoss() RMSE = torch.sqrt(criterion(data['image'], prev)) opt.noise_amp = opt.noise_amp_init", "from Training import functions from Training.imresize import imresize import matplotlib.pyplot", "netD, netG, netS, reals, Gs, Ss, in_s, in_s_S, NoiseAmp, NoiseAmpS,", "= prev_S # full of 0 [None, 4, 32, 32]", "image generated by previous Generator with bilinaer upsampling pred_fake =", "3, opt.nzx, opt.nzy) ## Noise_: for generated false samples through", "out a proper coefficient errD.backward() optimizerD.step() errD2plot.append(errD.detach()) ## errD for", "(opt.niter - 1): plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch), functions.convert_image_np(fake.detach()), vmin=0, vmax=1)", "in_s, 
'generator', opt) ## given a new noise, prev is", "D_weights * feat_weights * \\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) * opt.lambda_feat errS", "########################### netS.zero_grad() pred_fake_S = netD(data['image'], segment_prob) loss_G_GAN_S = 0.03 *", "prev_S)) # RMSE_S = 0 opt.noise_amp_S = opt.noise_amp_init * RMSE_S", "optimizerD.step() errD2plot.append(errD.detach()) ## errD for each iteration ############################ # (2)", "G_z = torch.cat((G_z, 1-G_z), 1) G_z = imresize(G_z, real_next[1] /", "== 0): # first iteration training in this level if", "== []: noise = noise_ ## Gausiaan noise for generating", "real = reals[opt.scale_num] # find the current level image xn", "real_curr[1], opt) G_z = G_z[:, :, 0:real_next[0], 0:real_next[1]] ## resize", "############################ # (1) Update D network: maximize D(x) + D(G(z))", "# train with fake if (j == 0) & (epoch", "42] else: noise = opt.noise_amp * noise_ + prev ##", "zip(Gs, masks, reals, reals[1:], NoiseAmp): G_z = G_z[:, :, 0:real_curr[0],", ":param Gs: list of prev netG :param Ss: list of", "3, 42, 42] -> [None, 3, 43, 43] first step", "pred_fake = netD(fake.detach(), data['label'][:,0:1,...]) # output shape [1, 1, 16,", "netD(data['image'], segment_prob.detach()) loss_D_fake_S = loss.criterionGAN(pred_fake_S, False) D_S_z = loss_D_fake_S.item() errD", "data['label'].float()) else: loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim =1)) # GAN", ":param NoiseAmp: [1] :param in_s: all zeros [1, 3, 26,", "image # detach() make sure that the gradients don't go", "= netG(noise.detach(), prev, mask) # [None, 3, 32, 32] the", "in enumerate(dataloader): data['image'] = data['image'].to(opt.device) data['label'] = data['label'].long().to(opt.device) ############################ #", "elif mode == 'segment': count = 0 for G, mask,", "draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) ## given a", 
"data['image'] = data['image'].to(opt.device) data['label'] = data['label'].long().to(opt.device) ############################ # (1) Update", "'segment', opt) mask = data['label'][:,0:1,...] if Gs == []: noise", "G_z = G(z_in.detach(), G_z, mask) ## [1, 3, 26, 26]", "loss_G_GAN = 0.5 * loss.criterionGAN(pred_fake, True) # GAN feature matching", "plus the gaussian noise. fake = netG(noise.detach(), prev, mask) #", "0.03 * loss.criterionGAN(pred_fake_S, True) # Segmentation loss if opt.contour: loss_G_Seg", "previous Generator with bilinaer upsampling pred_fake = netD(fake.detach(), data['label'][:,0:1,...]) #", "output path return in_s, in_s_S, netG, netS def draw_concat(Gs, masks,", "num_classes=opt.label_nc)) schedulerD.step() schedulerG.step() schedulerS.step() functions.save_networks(netG, netD, netS, opt) ## save", "from Training.imresize import imresize import matplotlib.pyplot as plt from Models.pix2pixHD_base", "prev_S = torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy], 0, device=opt.device) in_s_S =", "0 opt.noise_amp_S = opt.noise_amp_init * RMSE_S mask = data['label'][:,0:1,...] 
else:", "0): # first iteration training in this level if Gs", "detach() make sure that the gradients don't go to the", "-> [1, 1, 23, 23] # print(len(pred_fake), len(pred_fake[0])) loss_D_fake =", "opt) prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt)", "to represent the image shape :param NoiseAmp: [1] :param in_s:", "opt.no_vgg_loss: self.criterionVGG = VGGLoss() def train_single_scale(dataloader, netD, netG, netS, reals,", "be compatible with current G [1, 3, 33, 33] count", "32] z_in = noise_amp * z + G_z G_z =", "'segment': count = 0 for G, mask, real_curr, real_next, noise_amp", "'rand' :param opt: :return: ''' G_z = in_s[:opt.batchSize, :, :,", "self.criterionGAN = GANLoss(not opt.no_lsgan) self.criterionFeat = nn.L1Loss() if opt.contour: self.crossEntropy", "* loss_D_fake_S) ## Todo: figure out a proper coefficient errD.backward()", "26, 26] output of previous generator if opt.contour: G_z =", "by previous Generator with bilinaer upsampling pred_fake = netD(fake.detach(), data['label'][:,0:1,...])", "+ 1) D_weights = 1.0 / opt.num_D for i in", "size with the input image # detach() make sure that", "[image pyramid] only used to represent the image shape :param", "output of previous generator G_z = imresize(G_z, real_next[1] / real_curr[1],", "= [] D_fake2plot = [] for epoch in range(opt.niter): #", "iteration D_real2plot.append(D_x) ## discriminator loss on real D_fake2plot.append(D_G_z + D_S_z)", "RMSE_S = torch.sqrt(criterion(onehot_label, prev_S)) # RMSE_S = 0 opt.noise_amp_S =", ":param netG: currG :param netS: currS :param reals: a list", "the same size with the input image # detach() make", "plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch), functions.convert_image_np(data['image']), vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf,", "G_z [None, 3, 32, 32] z_in = noise_amp * z", "[None, 4, 32, 32] onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S =", 
"optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999)) schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter", "## Gausiaan noise for generating image [None, 3, 42, 42]", "network: maximize D(G(z)) ########################### netG.zero_grad() pred_fake = netD(fake, data['label'][:,0:1,...]) loss_G_GAN", "feature matching loss loss_G_GAN_Feat = 0 if not opt.no_ganFeat_loss: feat_weights", "opt.nzy], opt.batchSize) # [None, 1, 32, 32] noise_ = noise_.expand(opt.batchSize,", "26] output of previous generator if opt.contour: G_z = torch.cat((G_z,", "Ss: list of prev netS :param in_s: 0-> all zero", "+ prev ## [None, 3, 43, 43] new noise is", "loss_G_GAN_Feat + loss_G_VGG + rec_loss errG.backward() optimizerG.step() ############################ # (3)", "netG(noise.detach(), prev, mask) # [None, 3, 32, 32] the same", "opt) ## prob with [None, 4, 32, 32] onehot_label =", "= torch.cat((G_z, 1-G_z), 1) G_z = imresize(G_z, real_next[1] / real_curr[1],", "= G_z[:, :, 0:real_next[0], 0:real_next[1]] ## resize the image to", "z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3]) else: z = functions.generate_noise( [opt.nc_z, real_curr[0],", "= torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy], 0, device=opt.device) in_s_S = prev_S", "image plus the gaussian noise. 
fake = netG(noise.detach(), prev, mask)", "torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter *", "RMSE_S = 0 opt.noise_amp_S = opt.noise_amp_init * RMSE_S mask =", "epoch, do the following: errG2plot.append(errG.detach()) ## ErrG for each iteration", "generator if opt.contour: G_z = torch.cat((G_z, 1-G_z), 1) G_z =", "lr=opt.lr_s, betas=(opt.beta1, 0.999)) schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8], gamma=opt.gamma)", "with bilinear upsampling [1, 3, 33, 33] criterion = nn.MSELoss()", "upsampling [1, 3, 33, 33] criterion = nn.MSELoss() RMSE =", ":return: ''' loss = Losses(opt) real = reals[opt.scale_num] # find", "netS(data['image'], mask2onehot(prev_S, opt.label_nc)) # print(data['image'].shape, onehot.shape) # print(epoch, j) segment_logit,", "opt.no_ganFeat_loss: feat_weights = 4.0 / (opt.n_layers_D + 1) D_weights =", "used to represent the image shape :param NoiseAmp: [1] :param", "[] -> [1] :param opt: config :return: ''' loss =", "to be compatible with current G [1, 3, 33, 33]", "= torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy], 0, device=opt.device) in_s = prev", "= opt.alpha # setup optimizer optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1,", "= nn.MSELoss() RMSE = torch.sqrt(criterion(data['image'], prev)) opt.noise_amp = opt.noise_amp_init *", "+ rec_loss errG.backward() optimizerG.step() ############################ # (3) Update S network:", "26, 26] all zeros, image input for the corest level", "netG :param Ss: list of prev netS :param in_s: 0->", "loss if alpha != 0: ## alpha = 10 calculate", "all zeros, image input for the corest level if len(Gs)", "## ErrG for each iteration D_real2plot.append(D_x) ## discriminator loss on", "opt.noise_amp = opt.noise_amp_init * RMSE prev_S = draw_concat(Ss, 
data['down_scale_image'], reals,", "generated by previous Generator with bilinear upsampling [1, 3, 33,", "[1, 1, 16, 16] -> [1, 1, 23, 23] #", "z = functions.generate_noise( [opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize) G_z = G_z[:,", "netS(data['image'], prev_S.detach()) pred_fake_S = netD(data['image'], segment_prob.detach()) loss_D_fake_S = loss.criterionGAN(pred_fake_S, False)", "[None, 3, 32, 32] prev_S = torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy],", "Update G network: maximize D(G(z)) ########################### netG.zero_grad() pred_fake = netD(fake,", ":param opt: :return: ''' G_z = in_s[:opt.batchSize, :, :, :]", "= functions.generate_noise([1, real_curr[0], real_curr[1]], opt.batchSize) z = z.expand(opt.batchSize, G_z.shape[1], z.shape[2],", "# (3) Update S network: maximize D(S(z)) ########################### netS.zero_grad() pred_fake_S", "# VGG feature matching loss loss_G_VGG = 0 if not", "optimizerG.step() ############################ # (3) Update S network: maximize D(S(z)) ###########################", "gaussian noise. 
fake = netG(noise.detach(), prev, mask) # [None, 3,", "32, 32] the same size with the input image #", "loss.criterionGAN(pred_fake, True) # GAN feature matching loss loss_G_GAN_Feat = 0", "reals, NoiseAmp, in_s, 'generator', opt) ## given a new noise,", "is a image generated by previous Generator with bilinear upsampling", "real[0] opt.nzy = real[1] # z_opt = 0 ## dummy", "G_z = in_s[:opt.batchSize, :, :, :] # [None, 3, 26,", "opt.nzx = real[0] opt.nzy = real[1] # z_opt = 0", "TODO: you can just pass image shape here :param Gs:", "loss.crossEntropy(segment_logit, data['label'].float()) else: loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim =1)) #", "== (opt.niter - 1): print('scale %d:[%d/%d]' % (opt.scale_num, epoch, opt.niter))", "betas=(opt.beta1, 0.999)) optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999)) optimizerS =", "= VGGLoss() def train_single_scale(dataloader, netD, netG, netS, reals, Gs, Ss,", "VGGLoss from Models.pix2pixHD2 import mask2onehot class Losses(): def __init__(self, opt):", "opt.nc_z, opt.nzx, opt.nzy], 0, device=opt.device) in_s = prev # full", "## Todo: figure out a proper coefficient errD.backward() optimizerD.step() errD2plot.append(errD.detach())", "current level image xn opt.nzx = real[0] opt.nzy = real[1]", "masks, reals, reals[1:], NoiseAmp): G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]]", "the gradients don't go to the noise. 
# prev:[None, 3,", "def __init__(self, opt): self.criterionGAN = GANLoss(not opt.no_lsgan) self.criterionFeat = nn.L1Loss()", "full of 0 [None, 3, 32, 32] prev_S = torch.full([opt.batchSize,", "3, 32, 32] prev_S = torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy], 0,", "(opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(),", "range(opt.num_D): for j in range(len(pred_fake_S[i]) - 1): loss_G_GAN_Feat_S += D_weights", "real[1] # z_opt = 0 ## dummy z_opt alpha =", "noise. fake = netG(noise.detach(), prev, mask) # [None, 3, 32,", "mask) ## [1, 3, 26, 26] output of previous generator", "netS, reals, Gs, Ss, in_s, in_s_S, NoiseAmp, NoiseAmpS, opt): '''", "milestones=[opt.niter * 0.8], gamma=opt.gamma) errD2plot = [] errG2plot = []", "= loss.criterionGAN(pred_fake, False) D_G_z = loss_D_fake.item() # segment_logit, segment_mask =", "D_real2plot.append(D_x) ## discriminator loss on real D_fake2plot.append(D_G_z + D_S_z) ##", "real_curr, real_next, noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp): G_z", "26] output of previous generator G_z = imresize(G_z, real_next[1] /", "Noise_: for generated false samples through generator else: noise_ =", "G_z, _ = G(mask, G_z) ## [1, 3, 26, 26]", "reals, NoiseAmpS, in_s_S, 'segment', opt) mask = data['label'][:,0:1,...] 
if Gs", "mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S = torch.sqrt(criterion(onehot_label, prev_S)) # RMSE_S = 0", "for generating image [None, 3, 42, 42] else: noise =", "[]: noise = noise_ ## Gausiaan noise for generating image", "if opt.contour: self.crossEntropy = nn.BCEWithLogitsLoss() else: self.crossEntropy = nn.CrossEntropyLoss() if", "torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8], gamma=opt.gamma) errD2plot = [] errG2plot =", "[] for epoch in range(opt.niter): # niter = 2000 if", "opt.nzx, opt.nzy], opt.batchSize) for j, data in enumerate(dataloader): data['image'] =", "0 for G, mask, real_curr, real_next, noise_amp in zip(Gs, masks,", "## Noise_: for generated false samples through generator else: noise_", "loss_G_GAN + loss_G_GAN_Feat + loss_G_VGG + rec_loss errG.backward() optimizerG.step() ############################", "* RMSE prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment',", "D_S_z = loss_D_fake_S.item() errD = (loss_D_real + 0.5 * loss_D_fake", "the following: errG2plot.append(errG.detach()) ## ErrG for each iteration D_real2plot.append(D_x) ##", "mode, opt): ''' :param Gs: [G0] :param mask: [down scaled", "full of 0 [None, 4, 32, 32] mask = data['label'][:,0:1,...]", "prev netS :param in_s: 0-> all zero [1, 3, 26,", "= draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) mask =", "not opt.no_ganFeat_loss: feat_weights = 4.0 / (opt.n_layers_D + 1) D_weights", "* opt.lambda_feat # VGG feature matching loss loss_G_VGG = 0", "opt is used to parser output path return in_s, in_s_S,", "(j == 0) & (epoch == 0): # first iteration", "G_z, mask) ## [1, 3, 26, 26] output of previous", "= 0.03 * loss.criterionGAN(pred_fake_S, True) # Segmentation loss if opt.contour:", "level if Gs == [] and Ss == []: prev", "0 errG = loss_G_GAN + loss_G_GAN_Feat + loss_G_VGG + rec_loss", "loss_D_fake.item() # segment_logit, segment_mask = 
netS(data['image'], mask2onehot(prev_S, opt.label_nc)) # print(data['image'].shape,", "currS :param reals: a list of image pyramid ## TODO:", "[] and Ss == []: prev = torch.full([opt.batchSize, opt.nc_z, opt.nzx,", "feature matching loss loss_G_GAN_Feat_S = 0 if not opt.no_ganFeat_loss: feat_weights", "onehot.shape) # print(epoch, j) segment_logit, segment_prob, segment_mask = netS(data['image'], prev_S.detach())", "= functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) for j, data in enumerate(dataloader):", "if opt.contour: G_z = torch.cat((G_z, 1-G_z), 1) G_z = imresize(G_z,", "for j in range(len(pred_fake_S[i]) - 1): loss_G_GAN_Feat_S += D_weights *", "= loss.criterionVGG(fake, data['image']) * opt.lambda_feat ## reconstruction loss if alpha", "data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) ## prob with [None,", "# GAN feature matching loss loss_G_GAN_Feat = 0 if not", "in_s, in_s_S, NoiseAmp, NoiseAmpS, opt): ''' :param netD: currD :param", "'generator', opt) prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment',", "prev # full of 0 [None, 3, 32, 32] prev_S", "z_opt = 0 ## dummy z_opt alpha = opt.alpha #", "% (opt.outf, epoch), functions.convert_image_np(fake.detach()), vmin=0, vmax=1) plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch),", "reals, NoiseAmp, in_s, mode, opt): ''' :param Gs: [G0] :param", "opt.nzy = real[1] # z_opt = 0 ## dummy z_opt", "opt.noise_amp_init * RMSE prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S,", "just pass image shape here :param Gs: list of prev", "bilinear upsampling [1, 3, 33, 33] criterion = nn.MSELoss() RMSE", "plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf,", "on fake if epoch % 25 == 0 or epoch", "compatible with current G [1, 3, 33, 33] count +=", "errS = loss_G_GAN_S + 
loss_G_GAN_Feat_S + loss_G_Seg errS.backward() optimizerS.step() ##", "= netS(data['image'], mask2onehot(prev_S, opt.label_nc)) # print(data['image'].shape, onehot.shape) # print(epoch, j)", "* feat_weights * \\ loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * opt.lambda_feat # VGG", "+ D_S_z) ## discriminator loss on fake if epoch %", "imresize(G_z, real_next[1] / real_curr[1], opt) G_z = G_z[:, :, 0:real_next[0],", "loss_G_GAN_Feat = 0 if not opt.no_ganFeat_loss: feat_weights = 4.0 /", "if mode == 'generator': count = 0 for G, mask,", "reals, reals[1:], NoiseAmp): if count == 0: z = functions.generate_noise([1,", "else: self.crossEntropy = nn.CrossEntropyLoss() if not opt.no_vgg_loss: self.criterionVGG = VGGLoss()", "1) G_z = imresize(G_z, real_next[1] / real_curr[1], opt) G_z =", "train_single_scale(dataloader, netD, netG, netS, reals, Gs, Ss, in_s, in_s_S, NoiseAmp,", "mask2onehot(prev_S, opt.label_nc)) # print(data['image'].shape, onehot.shape) # print(epoch, j) segment_logit, segment_prob,", "j in range(len(pred_fake[i]) - 1): loss_G_GAN_Feat += D_weights * feat_weights", "25 == 0 or epoch == (opt.niter - 1): plt.imsave('%s/fake_sample_%d.png'", "z.shape[2], z.shape[3]) else: z = functions.generate_noise( [opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize)", "NoiseAmp, in_s, 'generator', opt) prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS,", "prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) mask", "data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) prev_S = draw_concat(Ss, data['down_scale_image'],", "not opt.no_vgg_loss: loss_G_VGG = loss.criterionVGG(fake, data['image']) * opt.lambda_feat ## reconstruction", "coefficient errD.backward() optimizerD.step() errD2plot.append(errD.detach()) ## errD for each iteration ############################", "epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step() 
schedulerG.step() schedulerS.step() functions.save_networks(netG, netD, netS, opt)", "-> [None, 3, 43, 43] first step prev = 0,", "noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) # [None, 1, 32,", "opt.nzy], 0, device=opt.device) in_s_S = prev_S # full of 0", "milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8],", "you can just pass image shape here :param Gs: list", "Ss == []: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) #", "= opt.noise_amp_init opt.noise_amp_S = opt.noise_amp_init else: prev = draw_concat(Gs, data['down_scale_label'],", "= opt.noise_amp_init else: prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s,", "iteration ############################ # (2) Update G network: maximize D(G(z)) ###########################", "3, 26, 26] :param mode: 'rand' :param opt: :return: '''", "import imresize import matplotlib.pyplot as plt from Models.pix2pixHD_base import GANLoss,", "real_curr[0], real_curr[1]], opt.batchSize) z = z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3]) else:", "opt.nzx, opt.nzy], 0, device=opt.device) in_s = prev # full of", "each iteration D_real2plot.append(D_x) ## discriminator loss on real D_fake2plot.append(D_G_z +", "optimizerS.step() ## for every epoch, do the following: errG2plot.append(errG.detach()) ##", "same size with the input image # detach() make sure", "image xn opt.nzx = real[0] opt.nzy = real[1] # z_opt", "torch.sqrt(criterion(onehot_label, prev_S)) # RMSE_S = 0 opt.noise_amp_S = opt.noise_amp_init *", "26] all zeros, image input for the corest level if", "of image pyramid ## TODO: you can just pass image", ":param in_s: 0-> all zero [1, 3, 26, 26] :param", "3, 43, 43] first step prev = 0, second step", "G network: maximize D(G(z)) ########################### netG.zero_grad() pred_fake = netD(fake, data['label'][:,0:1,...])", "Gs: [G0] :param mask: 
[down scaled _mask] :param reals: [image", "zeros [1, 3, 26, 26] :param mode: 'rand' :param opt:", "feat_weights = 4.0 / (opt.n_layers_D + 1) D_weights = 1.0", "of previous generator G_z = imresize(G_z, real_next[1] / real_curr[1], opt)", "G_z = imresize(G_z, real_next[1] / real_curr[1], opt) G_z = G_z[:,", "real_next[1] / real_curr[1], opt) G_z = G_z[:, :, 0:real_next[0], 0:real_next[1]]", "[1] :param opt: config :return: ''' loss = Losses(opt) real", "Training import functions from Training.imresize import imresize import matplotlib.pyplot as", "print(len(pred_fake), len(pred_fake[0])) loss_D_fake = loss.criterionGAN(pred_fake, False) D_G_z = loss_D_fake.item() #", "prev is a image generated by previous Generator with bilinear", "reconstruction loss Recloss = nn.MSELoss() rec_loss = alpha * Recloss(fake,", "opt.noise_amp_init * RMSE_S mask = data['label'][:,0:1,...] else: prev = draw_concat(Gs,", "= optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999)) optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1,", "to the prev generated image plus the gaussian noise. 
fake", "prev netG :param Ss: list of prev netS :param in_s:", "draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) mask = data['label'][:,0:1,...]", "= 0 for G, mask, real_curr, real_next, noise_amp in zip(Gs,", "% (opt.scale_num, epoch, opt.niter)) if epoch % 25 == 0", "[1, 1, 23, 23] # print(len(pred_fake), len(pred_fake[0])) loss_D_fake = loss.criterionGAN(pred_fake,", "== [] and Ss == []: noise_ = functions.generate_noise([1, opt.nzx,", "+ loss_G_VGG + rec_loss errG.backward() optimizerG.step() ############################ # (3) Update", "or epoch == (opt.niter - 1): plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch),", "= 4.0 / (opt.n_layers_D + 1) D_weights = 1.0 /", "[None, 3, 26, 26] all zeros, image input for the", "config :return: ''' loss = Losses(opt) real = reals[opt.scale_num] #", "= alpha * Recloss(fake, data['image']) else: rec_loss = 0 errG", "prev)) opt.noise_amp = opt.noise_amp_init * RMSE prev_S = draw_concat(Ss, data['down_scale_image'],", "G_z [None, 3, 32, 32] _, G_z, _ = G(mask,", "fake if epoch % 25 == 0 or epoch ==", "netG, netD, z_opt, opt is used to parser output path", "/ opt.num_D for i in range(opt.num_D): for j in range(len(pred_fake_S[i])", "this level if Gs == [] and Ss == []:", "real_curr[1]], opt.batchSize) z = z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3]) else: z", "D_real2plot = [] D_fake2plot = [] for epoch in range(opt.niter):", "NoiseAmp, NoiseAmpS, opt): ''' :param netD: currD :param netG: currG", "loss_G_VGG = loss.criterionVGG(fake, data['image']) * opt.lambda_feat ## reconstruction loss if", "data['label'][:,0:1,...] 
else: prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator',", "discriminator loss on fake if epoch % 25 == 0", "z_opt, opt is used to parser output path return in_s,", "noise_amp * z + G_z G_z = G(z_in.detach(), G_z, mask)", "[None, 3, 32, 32] the same size with the input", "= noise_ ## Gausiaan noise for generating image [None, 3,", "output of previous generator if opt.contour: G_z = torch.cat((G_z, 1-G_z),", "16, 16] -> [1, 1, 23, 23] # print(len(pred_fake), len(pred_fake[0]))", "(opt.outf, epoch), functions.convert_image_np(data['image']), vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...],", "''' G_z = in_s[:opt.batchSize, :, :, :] # [None, 3,", "== 0) & (epoch == 0): # first iteration training", "(opt.n_layers_D + 1) D_weights = 1.0 / opt.num_D for i", "image input for the corest level if len(Gs) > 0:", "Recloss = nn.MSELoss() rec_loss = alpha * Recloss(fake, data['image']) else:", "opt.noise_amp_init else: prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator',", "generating image [None, 3, 42, 42] else: noise = opt.noise_amp", "samples through generator else: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize)", "mode: 'rand' :param opt: :return: ''' G_z = in_s[:opt.batchSize, :,", "= torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter", "input for the corest level if len(Gs) > 0: if", "0:real_curr[1]] ## G_z [None, 3, 32, 32] z_in = noise_amp", "32] noise_ = noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy) ## Noise_: for", "matching loss loss_G_GAN_Feat_S = 0 if not opt.no_ganFeat_loss: feat_weights =", "a list of image pyramid ## TODO: you can just", "1, 23, 23] # print(len(pred_fake), len(pred_fake[0])) loss_D_fake = loss.criterionGAN(pred_fake, False)", 
"from Models.pix2pixHD_base import GANLoss, VGGLoss from Models.pix2pixHD2 import mask2onehot class", "data in enumerate(dataloader): data['image'] = data['image'].to(opt.device) data['label'] = data['label'].long().to(opt.device) ############################", "if epoch % 25 == 0 or epoch == (opt.niter", "= torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter", "for epoch in range(opt.niter): # niter = 2000 if Gs", "opt.nzx, opt.nzy], 0, device=opt.device) in_s_S = prev_S # full of", "Gs, Ss, in_s, in_s_S, NoiseAmp, NoiseAmpS, opt): ''' :param netD:", "opt.noise_amp_S = opt.noise_amp_init else: prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp,", "True) D_x = loss_D_real.item() # train with fake if (j", "torch.nn as nn import torch.optim as optim import torch.utils.data from", "maximize D(G(z)) ########################### netG.zero_grad() pred_fake = netD(fake, data['label'][:,0:1,...]) loss_G_GAN =", "+= D_weights * feat_weights * \\ loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * opt.lambda_feat", "netG.zero_grad() pred_fake = netD(fake, data['label'][:,0:1,...]) loss_G_GAN = 0.5 * loss.criterionGAN(pred_fake,", "- 1): loss_G_GAN_Feat_S += D_weights * feat_weights * \\ loss.criterionFeat(pred_fake_S[i][j],", "import torch.utils.data from Training import functions from Training.imresize import imresize", "ErrG for each iteration D_real2plot.append(D_x) ## discriminator loss on real", "mask = data['label'][:,0:1,...] 
else: prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp,", "* loss.criterionGAN(pred_fake, True) # GAN feature matching loss loss_G_GAN_Feat =", "schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS,", "netS.zero_grad() pred_fake_S = netD(data['image'], segment_prob) loss_G_GAN_S = 0.03 * loss.criterionGAN(pred_fake_S,", "the corest level if len(Gs) > 0: if mode ==", "count = 0 for G, mask, real_curr, real_next, noise_amp in", "Models.pix2pixHD2 import mask2onehot class Losses(): def __init__(self, opt): self.criterionGAN =", "Gs == [] and Ss == []: noise_ = functions.generate_noise([1,", "reals, NoiseAmpS, in_s_S, 'segment', opt) ## prob with [None, 4,", "reals: [image pyramid] only used to represent the image shape", "in_s_S, 'segment', opt) mask = data['label'][:,0:1,...] if Gs == []:", "zero [1, 3, 26, 26] :param NoiseAmp: [] -> [1]", "by previous Generator with bilinear upsampling [1, 3, 33, 33]", "rec_loss errG.backward() optimizerG.step() ############################ # (3) Update S network: maximize", "= 2000 if Gs == [] and Ss == []:", "Todo: figure out a proper coefficient errD.backward() optimizerD.step() errD2plot.append(errD.detach()) ##", "Losses(): def __init__(self, opt): self.criterionGAN = GANLoss(not opt.no_lsgan) self.criterionFeat =", "image to be compatible with current G [1, 3, 33,", "of previous generator if opt.contour: G_z = torch.cat((G_z, 1-G_z), 1)", "43] first step prev = 0, second step prev =", "num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step() schedulerG.step()", "* 0.8], gamma=opt.gamma) errD2plot = [] errG2plot = [] D_real2plot", "VGG feature matching loss loss_G_VGG = 0 if not opt.no_vgg_loss:", "opt.no_lsgan) self.criterionFeat = nn.L1Loss() if 
opt.contour: self.crossEntropy = nn.BCEWithLogitsLoss() else:", "currD :param netG: currG :param netS: currS :param reals: a", "path return in_s, in_s_S, netG, netS def draw_concat(Gs, masks, reals,", "list of image pyramid ## TODO: you can just pass", "errG = loss_G_GAN + loss_G_GAN_Feat + loss_G_VGG + rec_loss errG.backward()", "## save netG, netD, z_opt, opt is used to parser", "4, 32, 32] onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S = torch.sqrt(criterion(onehot_label,", "import GANLoss, VGGLoss from Models.pix2pixHD2 import mask2onehot class Losses(): def", "## [None, 3, 43, 43] new noise is equal to", "## prob with [None, 4, 32, 32] onehot_label = mask2onehot(data['label'][:,0:1,...],", "z_opt alpha = opt.alpha # setup optimizer optimizerD = optim.Adam(netD.parameters(),", "* loss.criterionGAN(pred_fake_S, True) # Segmentation loss if opt.contour: loss_G_Seg =", "real_next, noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp): if count", "opt.contour: self.crossEntropy = nn.BCEWithLogitsLoss() else: self.crossEntropy = nn.CrossEntropyLoss() if not", "image pyramid ## TODO: you can just pass image shape", "first iteration training in this level if Gs == []", "in_s_S = prev_S # full of 0 [None, 4, 32,", "netG: currG :param netS: currS :param reals: a list of", "loss = Losses(opt) real = reals[opt.scale_num] # find the current", "Generator with bilinear upsampling [1, 3, 33, 33] criterion =", "G(z_in.detach(), G_z, mask) ## [1, 3, 26, 26] output of", "0) & (epoch == 0): # first iteration training in", "to the noise. # prev:[None, 3, 42, 42] -> [None,", "plt from Models.pix2pixHD_base import GANLoss, VGGLoss from Models.pix2pixHD2 import mask2onehot", "in_s: 0-> all zero [1, 3, 26, 26] :param NoiseAmp:", "shape [1, 1, 16, 16] -> [1, 1, 23, 23]", "j) segment_logit, segment_prob, segment_mask = netS(data['image'], prev_S.detach()) pred_fake_S = netD(data['image'],", "opt) mask = data['label'][:,0:1,...] 
if Gs == []: noise =", "pred_real[i][j].detach()) * opt.lambda_feat # VGG feature matching loss loss_G_VGG =", "opt) ## save netG, netD, z_opt, opt is used to", "0, device=opt.device) in_s = prev # full of 0 [None,", "0.999)) optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999)) schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD,", "new noise, prev is a image generated by previous Generator", "= draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) prev_S =", "save netG, netD, z_opt, opt is used to parser output", "gradients don't go to the noise. # prev:[None, 3, 42,", "make sure that the gradients don't go to the noise.", "with bilinaer upsampling pred_fake = netD(fake.detach(), data['label'][:,0:1,...]) # output shape", "def draw_concat(Gs, masks, reals, NoiseAmp, in_s, mode, opt): ''' :param", "## [1, 3, 26, 26] output of previous generator if", "import mask2onehot class Losses(): def __init__(self, opt): self.criterionGAN = GANLoss(not", "errD for each iteration ############################ # (2) Update G network:", "for j, data in enumerate(dataloader): data['image'] = data['image'].to(opt.device) data['label'] =", "[None, 4, 32, 32] mask = data['label'][:,0:1,...] 
opt.noise_amp = opt.noise_amp_init", "33, 33] count += 1 elif mode == 'segment': count", "## discriminator loss on fake if epoch % 25 ==", "nn.BCEWithLogitsLoss() else: self.crossEntropy = nn.CrossEntropyLoss() if not opt.no_vgg_loss: self.criterionVGG =", "32] onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S = torch.sqrt(criterion(onehot_label, prev_S)) #", "opt.niter)) if epoch % 25 == 0 or epoch ==", "3, 32, 32] _, G_z, _ = G(mask, G_z) ##", "netD(data['image'], segment_prob) loss_G_GAN_S = 0.03 * loss.criterionGAN(pred_fake_S, True) # Segmentation", "Update S network: maximize D(S(z)) ########################### netS.zero_grad() pred_fake_S = netD(data['image'],", "functions from Training.imresize import imresize import matplotlib.pyplot as plt from", "find the current level image xn opt.nzx = real[0] opt.nzy", "lr=opt.lr_g, betas=(opt.beta1, 0.999)) optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999)) schedulerD", "segment_mask = netS(data['image'], mask2onehot(prev_S, opt.label_nc)) # print(data['image'].shape, onehot.shape) # print(epoch,", "optim import torch.utils.data from Training import functions from Training.imresize import", "32] _, G_z, _ = G(mask, G_z) ## [1, 3,", "(opt.niter - 1): print('scale %d:[%d/%d]' % (opt.scale_num, epoch, opt.niter)) if", "23, 23] # print(len(pred_fake), len(pred_fake[0])) loss_D_fake = loss.criterionGAN(pred_fake, False) D_G_z", "opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step() schedulerG.step() schedulerS.step()", "real_next, noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp): G_z =", "current G [1, 3, 33, 33] count += 1 return", "device=opt.device) in_s = prev # full of 0 [None, 3,", "NoiseAmp, in_s, mode, opt): ''' :param Gs: [G0] :param mask:", "loss_G_GAN_Feat += D_weights * feat_weights * \\ loss.criterionFeat(pred_fake[i][j], 
pred_real[i][j].detach()) *", "## given a new noise, prev is a image generated", "noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp): if count ==", "milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8],", "loss.criterionVGG(fake, data['image']) * opt.lambda_feat ## reconstruction loss if alpha !=", "every epoch, do the following: errG2plot.append(errG.detach()) ## ErrG for each", "do the following: errG2plot.append(errG.detach()) ## ErrG for each iteration D_real2plot.append(D_x)", "+ 0.5 * loss_D_fake + 0.5 * loss_D_fake_S) ## Todo:", "1): loss_G_GAN_Feat += D_weights * feat_weights * \\ loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach())", "discriminator loss on real D_fake2plot.append(D_G_z + D_S_z) ## discriminator loss", "that the gradients don't go to the noise. # prev:[None,", "and Ss == []: prev = torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy],", "10 calculate the reconstruction loss Recloss = nn.MSELoss() rec_loss =", "* z + G_z G_z = G(z_in.detach(), G_z, mask) ##", "3, 26, 26] :param NoiseAmp: [] -> [1] :param opt:", "== [] and Ss == []: prev = torch.full([opt.batchSize, opt.nc_z,", "25 == 0 or epoch == (opt.niter - 1): print('scale", "G_z[:, :, 0:real_next[0], 0:real_next[1]] ## resize the image to be", "not opt.no_vgg_loss: self.criterionVGG = VGGLoss() def train_single_scale(dataloader, netD, netG, netS,", "= 0 opt.noise_amp_S = opt.noise_amp_init * RMSE_S mask = data['label'][:,0:1,...]", "loss_G_GAN_S + loss_G_GAN_Feat_S + loss_G_Seg errS.backward() optimizerS.step() ## for every", "step prev = a image generated by previous Generator with", "opt.batchSize) z = z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3]) else: z =", "in range(opt.num_D): for j in range(len(pred_fake[i]) - 1): loss_G_GAN_Feat +=", "33, 33] criterion = nn.MSELoss() RMSE = torch.sqrt(criterion(data['image'], prev)) opt.noise_amp", "nn.MSELoss() RMSE = 
torch.sqrt(criterion(data['image'], prev)) opt.noise_amp = opt.noise_amp_init * RMSE", ":param netS: currS :param reals: a list of image pyramid", "=1)) # GAN feature matching loss loss_G_GAN_Feat_S = 0 if", "1): print('scale %d:[%d/%d]' % (opt.scale_num, epoch, opt.niter)) if epoch %", "netD: currD :param netG: currG :param netS: currS :param reals:", "= a image generated by previous Generator with bilinaer upsampling", "S network: maximize D(S(z)) ########################### netS.zero_grad() pred_fake_S = netD(data['image'], segment_prob)", "nn.CrossEntropyLoss() if not opt.no_vgg_loss: self.criterionVGG = VGGLoss() def train_single_scale(dataloader, netD,", "epoch == (opt.niter - 1): print('scale %d:[%d/%d]' % (opt.scale_num, epoch,", ":] # [None, 3, 26, 26] all zeros, image input", "imresize import matplotlib.pyplot as plt from Models.pix2pixHD_base import GANLoss, VGGLoss", "opt.label_nc) RMSE_S = torch.sqrt(criterion(onehot_label, prev_S)) # RMSE_S = 0 opt.noise_amp_S", "# [None, 3, 32, 32] the same size with the", "for every epoch, do the following: errG2plot.append(errG.detach()) ## ErrG for", "for the corest level if len(Gs) > 0: if mode", "output shape [1, 1, 16, 16] -> [1, 1, 23,", "= G(z_in.detach(), G_z, mask) ## [1, 3, 26, 26] output", "# [None, 1, 32, 32] noise_ = noise_.expand(opt.batchSize, 3, opt.nzx,", "= G(mask, G_z) ## [1, 3, 26, 26] output of", "reals[1:], NoiseAmp): if count == 0: z = functions.generate_noise([1, real_curr[0],", "in zip(Gs, masks, reals, reals[1:], NoiseAmp): G_z = G_z[:, :,", "GANLoss, VGGLoss from Models.pix2pixHD2 import mask2onehot class Losses(): def __init__(self,", "shape :param NoiseAmp: [1] :param in_s: all zeros [1, 3,", ":return: ''' G_z = in_s[:opt.batchSize, :, :, :] # [None,", "* opt.lambda_feat errS = loss_G_GAN_S + loss_G_GAN_Feat_S + loss_G_Seg errS.backward()", "mask, real_curr, real_next, noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp):", "level if len(Gs) > 0: if mode == 'generator': count", 
"opt.batchSize) for j, data in enumerate(dataloader): data['image'] = data['image'].to(opt.device) data['label']", "= optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999)) schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter *", "# (1) Update D network: maximize D(x) + D(G(z)) ###########################", "loss_G_Seg = loss.crossEntropy(segment_prob, torch.squeeze(data['label'][:,0:1,...], dim =1)) # GAN feature matching", "26, 26] output of previous generator G_z = imresize(G_z, real_next[1]", "reconstruction loss if alpha != 0: ## alpha = 10", "calculate the reconstruction loss Recloss = nn.MSELoss() rec_loss = alpha", "& (epoch == 0): # first iteration training in this", ":param reals: [image pyramid] only used to represent the image", "masks, reals, NoiseAmp, in_s, mode, opt): ''' :param Gs: [G0]", "'segment', opt) ## prob with [None, 4, 32, 32] onehot_label", "input image # detach() make sure that the gradients don't", "for each iteration D_real2plot.append(D_x) ## discriminator loss on real D_fake2plot.append(D_G_z", "gamma=opt.gamma) schedulerS = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8], gamma=opt.gamma) errD2plot =", "= noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy) ## Noise_: for generated false", "a image generated by previous Generator with bilinaer upsampling pred_fake", "range(len(pred_fake[i]) - 1): loss_G_GAN_Feat += D_weights * feat_weights * \\", "## dummy z_opt alpha = opt.alpha # setup optimizer optimizerD", "1, 16, 16] -> [1, 1, 23, 23] # print(len(pred_fake),", "torch.cat((G_z, 1-G_z), 1) G_z = imresize(G_z, real_next[1] / real_curr[1], opt)", "43, 43] new noise is equal to the prev generated", "zeros, image input for the corest level if len(Gs) >", "loss_D_real.item() # train with fake if (j == 0) &", "nn.L1Loss() if opt.contour: self.crossEntropy = nn.BCEWithLogitsLoss() else: self.crossEntropy = nn.CrossEntropyLoss()", "in_s_S, 
'segment', opt) ## prob with [None, 4, 32, 32]", "prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) ##", "mask2onehot class Losses(): def __init__(self, opt): self.criterionGAN = GANLoss(not opt.no_lsgan)", "32] prev_S = torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy], 0, device=opt.device) in_s_S", "0.8], gamma=opt.gamma) schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerS", "D_fake2plot = [] for epoch in range(opt.niter): # niter =", "= [] for epoch in range(opt.niter): # niter = 2000", "# output shape [1, 1, 16, 16] -> [1, 1,", "class Losses(): def __init__(self, opt): self.criterionGAN = GANLoss(not opt.no_lsgan) self.criterionFeat", "for generated false samples through generator else: noise_ = functions.generate_noise([1,", "D(x) + D(G(z)) ########################### # train with real netD.zero_grad() pred_real", "G(mask, G_z) ## [1, 3, 26, 26] output of previous", "vmax=1) plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch), functions.convert_image_np(data['image']), vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png' %", "0: z = functions.generate_noise([1, real_curr[0], real_curr[1]], opt.batchSize) z = z.expand(opt.batchSize,", "list of prev netS :param in_s: 0-> all zero [1,", "feature matching loss loss_G_VGG = 0 if not opt.no_vgg_loss: loss_G_VGG", "schedulerG.step() schedulerS.step() functions.save_networks(netG, netD, netS, opt) ## save netG, netD,", "loss on real D_fake2plot.append(D_G_z + D_S_z) ## discriminator loss on", "## resize the image to be compatible with current G", "noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp): G_z = G_z[:,", "= 0 if not opt.no_vgg_loss: loss_G_VGG = loss.criterionVGG(fake, data['image']) *", "= real[0] opt.nzy = real[1] # z_opt = 0 ##", "reals[1:], NoiseAmp): G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z", "vmin=0, vmax=1) 
plt.imsave('%s/fake_sample_real_%d.png' % (opt.outf, epoch), functions.convert_image_np(data['image']), vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png'", "schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG,", "print(epoch, j) segment_logit, segment_prob, segment_mask = netS(data['image'], prev_S.detach()) pred_fake_S =", "32, 32] _, G_z, _ = G(mask, G_z) ## [1,", "netS :param in_s: 0-> all zero [1, 3, 26, 26]", "= loss_D_fake_S.item() errD = (loss_D_real + 0.5 * loss_D_fake +", "of 0 [None, 4, 32, 32] mask = data['label'][:,0:1,...] opt.noise_amp", "= G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32,", "Losses(opt) real = reals[opt.scale_num] # find the current level image", "if Gs == []: noise = noise_ ## Gausiaan noise", "D_weights * feat_weights * \\ loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * opt.lambda_feat #", "dummy z_opt alpha = opt.alpha # setup optimizer optimizerD =", "netS, opt) ## save netG, netD, z_opt, opt is used", "data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) ## given a new", "real_curr[0], real_curr[1]], opt.batchSize) G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ##", "mask) # [None, 3, 32, 32] the same size with", "- 1): loss_G_GAN_Feat += D_weights * feat_weights * \\ loss.criterionFeat(pred_fake[i][j],", "3, 33, 33] count += 1 elif mode == 'segment':", "segment_prob.detach()) loss_D_fake_S = loss.criterionGAN(pred_fake_S, False) D_S_z = loss_D_fake_S.item() errD =", "torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter *", "42, 42] -> [None, 3, 43, 43] first step prev", "alpha * Recloss(fake, data['image']) else: rec_loss = 0 errG =", "draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, 
in_s_S, 'segment', opt) ## prob with", "opt.noise_amp_S = opt.noise_amp_init * RMSE_S mask = data['label'][:,0:1,...] else: prev", "(opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step() schedulerG.step() schedulerS.step() functions.save_networks(netG, netD, netS,", "in_s_S, netG, netS def draw_concat(Gs, masks, reals, NoiseAmp, in_s, mode,", "scaled _mask] :param reals: [image pyramid] only used to represent", ":, 0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32, 32] _,", "= GANLoss(not opt.no_lsgan) self.criterionFeat = nn.L1Loss() if opt.contour: self.crossEntropy =", "or epoch == (opt.niter - 1): print('scale %d:[%d/%d]' % (opt.scale_num,", "loss.criterionGAN(pred_real, True) D_x = loss_D_real.item() # train with fake if", "## errD for each iteration ############################ # (2) Update G", "opt.lambda_feat errS = loss_G_GAN_S + loss_G_GAN_Feat_S + loss_G_Seg errS.backward() optimizerS.step()", "% (opt.outf, epoch), functions.convert_mask_np(segment_mask.detach(), num_classes=opt.label_nc)) schedulerD.step() schedulerG.step() schedulerS.step() functions.save_networks(netG, netD,", "NoiseAmp): G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z [None,", "shape here :param Gs: list of prev netG :param Ss:", ":param reals: a list of image pyramid ## TODO: you", "= loss_G_GAN_S + loss_G_GAN_Feat_S + loss_G_Seg errS.backward() optimizerS.step() ## for", "len(Gs) > 0: if mode == 'generator': count = 0", "false samples through generator else: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy],", "= data['label'].long().to(opt.device) ############################ # (1) Update D network: maximize D(x)", "[down scaled _mask] :param reals: [image pyramid] only used to", "opt.batchSize) # [None, 1, 32, 32] noise_ = noise_.expand(opt.batchSize, 3,", "and Ss == []: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize)", "42, 42] else: noise = opt.noise_amp * noise_ + prev", "0.5 * 
loss_D_fake_S) ## Todo: figure out a proper coefficient", "data['label'][:,0:1,...] if Gs == []: noise = noise_ ## Gausiaan", "reals, NoiseAmp, in_s, 'generator', opt) prev_S = draw_concat(Ss, data['down_scale_image'], reals,", "(1) Update D network: maximize D(x) + D(G(z)) ########################### #", "functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) # [None, 1, 32, 32] noise_", "0 or epoch == (opt.niter - 1): plt.imsave('%s/fake_sample_%d.png' % (opt.outf,", "z = z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3]) else: z = functions.generate_noise(", "= opt.noise_amp_init * RMSE prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS,", "def train_single_scale(dataloader, netD, netG, netS, reals, Gs, Ss, in_s, in_s_S,", "# first iteration training in this level if Gs ==", "[None, 3, 32, 32] z_in = noise_amp * z +", "'generator', opt) ## given a new noise, prev is a", "lr=opt.lr_d, betas=(opt.beta1, 0.999)) optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999)) optimizerS", "noise_ ## Gausiaan noise for generating image [None, 3, 42,", "opt): ''' :param Gs: [G0] :param mask: [down scaled _mask]", "0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32, 32] _, G_z,", "prev_S.detach()) pred_fake_S = netD(data['image'], segment_prob.detach()) loss_D_fake_S = loss.criterionGAN(pred_fake_S, False) D_S_z", "[opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize) G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]]", "netG, netS, reals, Gs, Ss, in_s, in_s_S, NoiseAmp, NoiseAmpS, opt):", "# setup optimizer optimizerD = optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999)) optimizerG", "optim.Adam(netD.parameters(), lr=opt.lr_d, betas=(opt.beta1, 0.999)) optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999))", "0 if not opt.no_ganFeat_loss: feat_weights = 4.0 / (opt.n_layers_D +", "noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) for j, data in", 
"loss_D_fake_S = loss.criterionGAN(pred_fake_S, False) D_S_z = loss_D_fake_S.item() errD = (loss_D_real", "# GAN feature matching loss loss_G_GAN_Feat_S = 0 if not", "data['down_scale_image'], reals, NoiseAmpS, in_s_S, 'segment', opt) mask = data['label'][:,0:1,...] if", "segment_logit, segment_prob, segment_mask = netS(data['image'], prev_S.detach()) pred_fake_S = netD(data['image'], segment_prob.detach())", "% (opt.outf, epoch), functions.convert_image_np(data['image']), vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch),", "0: if mode == 'generator': count = 0 for G,", "33] count += 1 elif mode == 'segment': count =", ":param in_s: all zeros [1, 3, 26, 26] :param mode:", "mask: [down scaled _mask] :param reals: [image pyramid] only used", "print(data['image'].shape, onehot.shape) # print(epoch, j) segment_logit, segment_prob, segment_mask = netS(data['image'],", "# full of 0 [None, 3, 32, 32] prev_S =", "Segmentation loss if opt.contour: loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float()) else: loss_G_Seg", "Ss, in_s, in_s_S, NoiseAmp, NoiseAmpS, opt): ''' :param netD: currD", "= torch.sqrt(criterion(data['image'], prev)) opt.noise_amp = opt.noise_amp_init * RMSE prev_S =", "loss_G_VGG = 0 if not opt.no_vgg_loss: loss_G_VGG = loss.criterionVGG(fake, data['image'])", "[]: prev = torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy], 0, device=opt.device) in_s", "loss if opt.contour: loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float()) else: loss_G_Seg =", "errG2plot.append(errG.detach()) ## ErrG for each iteration D_real2plot.append(D_x) ## discriminator loss", "the gaussian noise. 
fake = netG(noise.detach(), prev, mask) # [None,", "- 1): print('scale %d:[%d/%d]' % (opt.scale_num, epoch, opt.niter)) if epoch", "/ real_curr[1], opt) G_z = G_z[:, :, 0:real_next[0], 0:real_next[1]] ##", "23] # print(len(pred_fake), len(pred_fake[0])) loss_D_fake = loss.criterionGAN(pred_fake, False) D_G_z =", "prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) prev_S", "[1, 3, 26, 26] output of previous generator if opt.contour:", ":param netD: currD :param netG: currG :param netS: currS :param", "import functions from Training.imresize import imresize import matplotlib.pyplot as plt", "through generator else: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy], opt.batchSize) for", "the image shape :param NoiseAmp: [1] :param in_s: all zeros", "1): plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch), functions.convert_image_np(fake.detach()), vmin=0, vmax=1) plt.imsave('%s/fake_sample_real_%d.png' %", "j, data in enumerate(dataloader): data['image'] = data['image'].to(opt.device) data['label'] = data['label'].long().to(opt.device)", "parser output path return in_s, in_s_S, netG, netS def draw_concat(Gs,", "= data['label'][:,0:1,...] opt.noise_amp = opt.noise_amp_init opt.noise_amp_S = opt.noise_amp_init else: prev", "= data['label'][:,0:1,...] 
if Gs == []: noise = noise_ ##", "= 0 errG = loss_G_GAN + loss_G_GAN_Feat + loss_G_VGG +", "netS def draw_concat(Gs, masks, reals, NoiseAmp, in_s, mode, opt): '''", "GAN feature matching loss loss_G_GAN_Feat_S = 0 if not opt.no_ganFeat_loss:", "real netD.zero_grad() pred_real = netD(data['image'], data['label'][:,0:1,...]) loss_D_real = loss.criterionGAN(pred_real, True)", "############################ # (3) Update S network: maximize D(S(z)) ########################### netS.zero_grad()", "opt.num_D for i in range(opt.num_D): for j in range(len(pred_fake_S[i]) -", "in_s_S, NoiseAmp, NoiseAmpS, opt): ''' :param netD: currD :param netG:", "enumerate(dataloader): data['image'] = data['image'].to(opt.device) data['label'] = data['label'].long().to(opt.device) ############################ # (1)", "with real netD.zero_grad() pred_real = netD(data['image'], data['label'][:,0:1,...]) loss_D_real = loss.criterionGAN(pred_real,", "0.5 * loss_D_fake + 0.5 * loss_D_fake_S) ## Todo: figure", "masks, reals, reals[1:], NoiseAmp): if count == 0: z =", "# full of 0 [None, 4, 32, 32] mask =", "in_s, 'generator', opt) prev_S = draw_concat(Ss, data['down_scale_image'], reals, NoiseAmpS, in_s_S,", "maximize D(S(z)) ########################### netS.zero_grad() pred_fake_S = netD(data['image'], segment_prob) loss_G_GAN_S =", "+ loss_G_Seg errS.backward() optimizerS.step() ## for every epoch, do the", "* loss_D_fake + 0.5 * loss_D_fake_S) ## Todo: figure out", "optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999)) optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999))", "/ (opt.n_layers_D + 1) D_weights = 1.0 / opt.num_D for", "the prev generated image plus the gaussian noise. 
fake =", "<gh_stars>1-10 import torch.nn as nn import torch.optim as optim import", "optim.Adam(netS.parameters(), lr=opt.lr_s, betas=(opt.beta1, 0.999)) schedulerD = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerD, milestones=[opt.niter * 0.8],", "loss loss_G_GAN_Feat = 0 if not opt.no_ganFeat_loss: feat_weights = 4.0", "range(len(pred_fake_S[i]) - 1): loss_G_GAN_Feat_S += D_weights * feat_weights * \\", ":param Gs: [G0] :param mask: [down scaled _mask] :param reals:", "== 0: z = functions.generate_noise([1, real_curr[0], real_curr[1]], opt.batchSize) z =", "\\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) * opt.lambda_feat errS = loss_G_GAN_S + loss_G_GAN_Feat_S", "== []: prev = torch.full([opt.batchSize, opt.nc_z, opt.nzx, opt.nzy], 0, device=opt.device)", "= torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerS, milestones=[opt.niter * 0.8], gamma=opt.gamma) errD2plot = [] errG2plot", "opt) G_z = G_z[:, :, 0:real_next[0], 0:real_next[1]] ## resize the", "noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy) ## Noise_: for generated false samples", "* \\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) * opt.lambda_feat errS = loss_G_GAN_S +", "z.shape[3]) else: z = functions.generate_noise( [opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize) G_z", "opt: :return: ''' G_z = in_s[:opt.batchSize, :, :, :] #", "is equal to the prev generated image plus the gaussian", "= (loss_D_real + 0.5 * loss_D_fake + 0.5 * loss_D_fake_S)", "netD, z_opt, opt is used to parser output path return", "- 1): plt.imsave('%s/fake_sample_%d.png' % (opt.outf, epoch), functions.convert_image_np(fake.detach()), vmin=0, vmax=1) plt.imsave('%s/fake_sample_real_%d.png'", "32, 32] prev_S = torch.full([opt.batchSize, opt.label_nc, opt.nzx, opt.nzy], 0, device=opt.device)", "in range(len(pred_fake[i]) - 1): loss_G_GAN_Feat += D_weights * feat_weights *", "= imresize(G_z, real_next[1] / real_curr[1], opt) G_z = G_z[:, :,", 
"functions.save_networks(netG, netD, netS, opt) ## save netG, netD, z_opt, opt", "if not opt.no_vgg_loss: self.criterionVGG = VGGLoss() def train_single_scale(dataloader, netD, netG,", "%d:[%d/%d]' % (opt.scale_num, epoch, opt.niter)) if epoch % 25 ==", "26] :param NoiseAmp: [] -> [1] :param opt: config :return:", "train with fake if (j == 0) & (epoch ==", "loss_G_VGG + rec_loss errG.backward() optimizerG.step() ############################ # (3) Update S", "the input image # detach() make sure that the gradients", "opt.num_D for i in range(opt.num_D): for j in range(len(pred_fake[i]) -", "0.999)) optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999)) optimizerS = optim.Adam(netS.parameters(),", "[] D_real2plot = [] D_fake2plot = [] for epoch in", "functions.generate_noise([1, real_curr[0], real_curr[1]], opt.batchSize) z = z.expand(opt.batchSize, G_z.shape[1], z.shape[2], z.shape[3])", "errD2plot.append(errD.detach()) ## errD for each iteration ############################ # (2) Update", "## G_z [None, 3, 32, 32] _, G_z, _ =", "= netD(fake.detach(), data['label'][:,0:1,...]) # output shape [1, 1, 16, 16]", "32, 32] noise_ = noise_.expand(opt.batchSize, 3, opt.nzx, opt.nzy) ## Noise_:", "import matplotlib.pyplot as plt from Models.pix2pixHD_base import GANLoss, VGGLoss from", "* noise_ + prev ## [None, 3, 43, 43] new", "[] D_fake2plot = [] for epoch in range(opt.niter): # niter", "rec_loss = alpha * Recloss(fake, data['image']) else: rec_loss = 0", "real_curr, real_next, noise_amp in zip(Gs, masks, reals, reals[1:], NoiseAmp): if", "VGGLoss() def train_single_scale(dataloader, netD, netG, netS, reals, Gs, Ss, in_s,", "3, 32, 32] z_in = noise_amp * z + G_z", "if alpha != 0: ## alpha = 10 calculate the", "== 'segment': count = 0 for G, mask, real_curr, real_next,", "= mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S = torch.sqrt(criterion(onehot_label, prev_S)) # RMSE_S =", "opt.label_nc)) # print(data['image'].shape, 
onehot.shape) # print(epoch, j) segment_logit, segment_prob, segment_mask", "used to parser output path return in_s, in_s_S, netG, netS", "= nn.MSELoss() rec_loss = alpha * Recloss(fake, data['image']) else: rec_loss", "gamma=opt.gamma) schedulerG = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizerG, milestones=[opt.niter * 0.8], gamma=opt.gamma) schedulerS =", "loss_G_GAN_Feat_S = 0 if not opt.no_ganFeat_loss: feat_weights = 4.0 /", "RMSE = torch.sqrt(criterion(data['image'], prev)) opt.noise_amp = opt.noise_amp_init * RMSE prev_S", "step prev = 0, second step prev = a image", "= nn.BCEWithLogitsLoss() else: self.crossEntropy = nn.CrossEntropyLoss() if not opt.no_vgg_loss: self.criterionVGG", "draw_concat(Gs, masks, reals, NoiseAmp, in_s, mode, opt): ''' :param Gs:", "prob with [None, 4, 32, 32] onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc)", "# print(data['image'].shape, onehot.shape) # print(epoch, j) segment_logit, segment_prob, segment_mask =", "G_z = G_z[:, :, 0:real_next[0], 0:real_next[1]] ## resize the image", "loss_G_GAN_Feat_S += D_weights * feat_weights * \\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach()) *", "else: z = functions.generate_noise( [opt.nc_z, real_curr[0], real_curr[1]], opt.batchSize) G_z =", "opt): self.criterionGAN = GANLoss(not opt.no_lsgan) self.criterionFeat = nn.L1Loss() if opt.contour:", "data['image']) else: rec_loss = 0 errG = loss_G_GAN + loss_G_GAN_Feat", "noise is equal to the prev generated image plus the", "G, mask, real_curr, real_next, noise_amp in zip(Gs, masks, reals, reals[1:],", "opt.nzy], opt.batchSize) for j, data in enumerate(dataloader): data['image'] = data['image'].to(opt.device)", "else: prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt)", "nn.MSELoss() rec_loss = alpha * Recloss(fake, data['image']) else: rec_loss =", "matplotlib.pyplot as plt from Models.pix2pixHD_base import GANLoss, VGGLoss from Models.pix2pixHD2", "1.0 / 
opt.num_D for i in range(opt.num_D): for j in", "'generator': count = 0 for G, mask, real_curr, real_next, noise_amp", "NoiseAmpS, in_s_S, 'segment', opt) mask = data['label'][:,0:1,...] if Gs ==", "prev = 0, second step prev = a image generated", "Training.imresize import imresize import matplotlib.pyplot as plt from Models.pix2pixHD_base import", "== 'generator': count = 0 for G, mask, real_curr, real_next,", "True) # GAN feature matching loss loss_G_GAN_Feat = 0 if", "is used to parser output path return in_s, in_s_S, netG,", "Gausiaan noise for generating image [None, 3, 42, 42] else:", "if (j == 0) & (epoch == 0): # first", "fake = netG(noise.detach(), prev, mask) # [None, 3, 32, 32]", "pred_fake = netD(fake, data['label'][:,0:1,...]) loss_G_GAN = 0.5 * loss.criterionGAN(pred_fake, True)", "noise, prev is a image generated by previous Generator with", "3, 26, 26] output of previous generator if opt.contour: G_z", "opt: config :return: ''' loss = Losses(opt) real = reals[opt.scale_num]", "vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes= opt.label_nc)) plt.imsave('%s/segmentation_mask_%d.png'", "if not opt.no_ganFeat_loss: feat_weights = 4.0 / (opt.n_layers_D + 1)", "* \\ loss.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * opt.lambda_feat # VGG feature matching", "1): loss_G_GAN_Feat_S += D_weights * feat_weights * \\ loss.criterionFeat(pred_fake_S[i][j], pred_real[i][j].detach())", "32, 32] z_in = noise_amp * z + G_z G_z", "[] and Ss == []: noise_ = functions.generate_noise([1, opt.nzx, opt.nzy],", "epoch), functions.convert_image_np(data['image']), vmin=0, vmax=1) plt.imsave('%s/fake_sample_mask_%d.png' % (opt.outf, epoch), functions.convert_mask_np(data['label'][:,0:1,...], num_classes=", "[1, 3, 26, 26] :param NoiseAmp: [] -> [1] :param", "[] errG2plot = [] D_real2plot = [] D_fake2plot = []", "= loss.crossEntropy(segment_prob, 
torch.squeeze(data['label'][:,0:1,...], dim =1)) # GAN feature matching loss", "segment_logit, segment_mask = netS(data['image'], mask2onehot(prev_S, opt.label_nc)) # print(data['image'].shape, onehot.shape) #", "self.crossEntropy = nn.BCEWithLogitsLoss() else: self.crossEntropy = nn.CrossEntropyLoss() if not opt.no_vgg_loss:", "== 0 or epoch == (opt.niter - 1): print('scale %d:[%d/%d]'", "= data['image'].to(opt.device) data['label'] = data['label'].long().to(opt.device) ############################ # (1) Update D", "pred_real = netD(data['image'], data['label'][:,0:1,...]) loss_D_real = loss.criterionGAN(pred_real, True) D_x =", "NoiseAmp: [] -> [1] :param opt: config :return: ''' loss", "print('scale %d:[%d/%d]' % (opt.scale_num, epoch, opt.niter)) if epoch % 25", "opt.batchSize) G_z = G_z[:, :, 0:real_curr[0], 0:real_curr[1]] ## G_z [None,", "following: errG2plot.append(errG.detach()) ## ErrG for each iteration D_real2plot.append(D_x) ## discriminator", "reals, Gs, Ss, in_s, in_s_S, NoiseAmp, NoiseAmpS, opt): ''' :param", "Gs == [] and Ss == []: prev = torch.full([opt.batchSize,", "in zip(Gs, masks, reals, reals[1:], NoiseAmp): if count == 0:", ":, 0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32, 32] z_in", "the current level image xn opt.nzx = real[0] opt.nzy =", "+ D(G(z)) ########################### # train with real netD.zero_grad() pred_real =", "RMSE_S mask = data['label'][:,0:1,...] 
else: prev = draw_concat(Gs, data['down_scale_label'], reals,", "+ loss_G_GAN_Feat_S + loss_G_Seg errS.backward() optimizerS.step() ## for every epoch,", "42] -> [None, 3, 43, 43] first step prev =", "3, 43, 43] new noise is equal to the prev", "prev = draw_concat(Gs, data['down_scale_label'], reals, NoiseAmp, in_s, 'generator', opt) ##", "maximize D(x) + D(G(z)) ########################### # train with real netD.zero_grad()", "given a new noise, prev is a image generated by", "with [None, 4, 32, 32] onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S", "proper coefficient errD.backward() optimizerD.step() errD2plot.append(errD.detach()) ## errD for each iteration", "# segment_logit, segment_mask = netS(data['image'], mask2onehot(prev_S, opt.label_nc)) # print(data['image'].shape, onehot.shape)", "if opt.contour: loss_G_Seg = loss.crossEntropy(segment_logit, data['label'].float()) else: loss_G_Seg = loss.crossEntropy(segment_prob,", "NoiseAmp: [1] :param in_s: all zeros [1, 3, 26, 26]", "netD(fake, data['label'][:,0:1,...]) loss_G_GAN = 0.5 * loss.criterionGAN(pred_fake, True) # GAN", "a proper coefficient errD.backward() optimizerD.step() errD2plot.append(errD.detach()) ## errD for each", "optimizerG = optim.Adam(netG.parameters(), lr=opt.lr_g, betas=(opt.beta1, 0.999)) optimizerS = optim.Adam(netS.parameters(), lr=opt.lr_s,", "0:real_curr[0], 0:real_curr[1]] ## G_z [None, 3, 32, 32] z_in =", "in_s, in_s_S, netG, netS def draw_concat(Gs, masks, reals, NoiseAmp, in_s,", "onehot_label = mask2onehot(data['label'][:,0:1,...], opt.label_nc) RMSE_S = torch.sqrt(criterion(onehot_label, prev_S)) # RMSE_S", "errS.backward() optimizerS.step() ## for every epoch, do the following: errG2plot.append(errG.detach())", "if Gs == [] and Ss == []: prev =", "= 1.0 / opt.num_D for i in range(opt.num_D): for j" ]
[ "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "OF ANY # KIND, either express or implied. See the", "1 stmt = ib.get() stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt =", "Software Foundation (ASF) under one # or more contributor license", "more contributor license agreements. See the NOTICE file # distributed", "Unless required by applicable law or agreed to in writing,", "4 tx = te.thread_axis(\"threadIdx.x\") ib = tvm.tir.ir_builder.create() A = ib.pointer(\"float32\",", "name=\"C\") ib.scope_attr(tx, \"thread_extent\", 1) with ib.for_range(0, n) as i: B", "j: B[j] = A[i * 4 + j] with ib.for_range(0,", "an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1) with ib.for_range(0, m) as j: B[j]", "# regarding copyright ownership. The ASF licenses this file #", "Apache Software Foundation (ASF) under one # or more contributor", "tvm.tir.ir_builder.create() A = ib.pointer(\"float32\", name=\"A\") C = ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx,", "in compliance # with the License. You may obtain a", "# to you under the Apache License, Version 2.0 (the", "WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express", "License for the # specific language governing permissions and limitations", "WARRANTIES OR CONDITIONS OF ANY # KIND, either express or", "j] with ib.for_range(0, m) as j: C[j] = B[j] +", "with this work for additional information # regarding copyright ownership.", "tvm import te def test_double_buffer(): dtype = 'int64' n =", "governing permissions and limitations # under the License. import tvm", "tvm.IRModule({ \"db\" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt) }) f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"]", "(ASF) under one # or more contributor license agreements. 
See", "2.0 (the # \"License\"); you may not use this file", "OR CONDITIONS OF ANY # KIND, either express or implied.", "+ j] with ib.for_range(0, m) as j: C[j] = B[j]", "License. import tvm from tvm import te def test_double_buffer(): dtype", "tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt) }) f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count = [0]", "# or more contributor license agreements. See the NOTICE file", "agreed to in writing, # software distributed under the License", "def test_double_buffer(): dtype = 'int64' n = 100 m =", "\"thread_extent\", 1) with ib.for_range(0, n) as i: B = ib.allocate(\"float32\",", "= tvm.tir.ir_builder.create() A = ib.pointer(\"float32\", name=\"A\") C = ib.pointer(\"float32\", name=\"C\")", "C[j] = B[j] + 1 stmt = ib.get() stmt =", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "with ib.for_range(0, m) as j: B[j] = A[i * 4", "te.thread_axis(\"threadIdx.x\") ib = tvm.tir.ir_builder.create() A = ib.pointer(\"float32\", name=\"A\") C =", "work for additional information # regarding copyright ownership. The ASF", "\"db\" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt) }) f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count", "specific language governing permissions and limitations # under the License.", "+= 1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert count[0] == 4 if __name__", "# under the License. import tvm from tvm import te", "n) as i: B = ib.allocate(\"float32\", m, name=\"B\", scope=\"shared\") with", "under the License is distributed on an # \"AS IS\"", "this file # to you under the Apache License, Version", "distributed under the License is distributed on an # \"AS", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either", "the License. import tvm from tvm import te def test_double_buffer():", "copyright ownership. 
The ASF licenses this file # to you", "# software distributed under the License is distributed on an", "A[i * 4 + j] with ib.for_range(0, m) as j:", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #", "1) with ib.for_range(0, n) as i: B = ib.allocate(\"float32\", m,", "2) stmt = tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body, tvm.tir.Allocate) assert stmt.body.body.extents[0].value ==", "tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert count[0] == 4 if __name__ == \"__main__\":", "(the # \"License\"); you may not use this file except", "the License. You may obtain a copy of the License", "in writing, # software distributed under the License is distributed", "m, name=\"B\", scope=\"shared\") with ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1) with ib.for_range(0,", "distributed with this work for additional information # regarding copyright", "under the License. import tvm from tvm import te def", "n = 100 m = 4 tx = te.thread_axis(\"threadIdx.x\") ib", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "ib.pointer(\"float32\", name=\"A\") C = ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx, \"thread_extent\", 1) with", "License is distributed on an # \"AS IS\" BASIS, WITHOUT", "ASF licenses this file # to you under the Apache", "name=\"A\") C = ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx, \"thread_extent\", 1) with ib.for_range(0,", "under the Apache License, Version 2.0 (the # \"License\"); you", "for the # specific language governing permissions and limitations #", "assert isinstance(stmt.body.body, tvm.tir.Allocate) assert stmt.body.body.extents[0].value == 2 mod = tvm.IRModule({", "ib.for_range(0, m) as j: C[j] = B[j] + 1 stmt", "with ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1) with ib.for_range(0, m) as j:", "as i: B = ib.allocate(\"float32\", m, name=\"B\", scope=\"shared\") with ib.new_scope():", "distributed on an 
# \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "regarding copyright ownership. The ASF licenses this file # to", "and limitations # under the License. import tvm from tvm", "See the License for the # specific language governing permissions", "to in writing, # software distributed under the License is", "or agreed to in writing, # software distributed under the", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "= ib.allocate(\"float32\", m, name=\"B\", scope=\"shared\") with ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1)", "ib.allocate(\"float32\", m, name=\"B\", scope=\"shared\") with ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1) with", "= tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt = tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body, tvm.tir.Allocate) assert", "and op.name == \"tvm_storage_sync\": count[0] += 1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert", "ownership. The ASF licenses this file # to you under", "B[j] = A[i * 4 + j] with ib.for_range(0, m)", "tvm.tir.Call) and op.name == \"tvm_storage_sync\": count[0] += 1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync)", "# \"License\"); you may not use this file except in", "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "m = 4 tx = te.thread_axis(\"threadIdx.x\") ib = tvm.tir.ir_builder.create() A", "2 mod = tvm.IRModule({ \"db\" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt) })", "to the Apache Software Foundation (ASF) under one # or", "\"License\"); you may not use this file except in compliance", "file # distributed with this work for additional information #", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "with the License. 
You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "ib.for_range(0, n) as i: B = ib.allocate(\"float32\", m, name=\"B\", scope=\"shared\")", "B[j] + 1 stmt = ib.get() stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2)", "= tvm.IRModule({ \"db\" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt) }) f =", "or more contributor license agreements. See the NOTICE file #", "from tvm import te def test_double_buffer(): dtype = 'int64' n", "m) as j: C[j] = B[j] + 1 stmt =", "applicable law or agreed to in writing, # software distributed", "# distributed with this work for additional information # regarding", "this work for additional information # regarding copyright ownership. The", ": tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt) }) f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count =", "writing, # software distributed under the License is distributed on", "the NOTICE file # distributed with this work for additional", "tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt = tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body, tvm.tir.Allocate) assert stmt.body.body.extents[0].value", "C.asobject()], stmt) }) f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count = [0] def", "is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES", "implied. See the License for the # specific language governing", "file # to you under the Apache License, Version 2.0", "+ 1 stmt = ib.get() stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt", "to you under the Apache License, Version 2.0 (the #", "= 'int64' n = 100 m = 4 tx =", "stmt = tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body, tvm.tir.Allocate) assert stmt.body.body.extents[0].value == 2", "CONDITIONS OF ANY # KIND, either express or implied. See", "ib = tvm.tir.ir_builder.create() A = ib.pointer(\"float32\", name=\"A\") C = ib.pointer(\"float32\",", "# with the License. 
You may obtain a copy of", "= ib.get() stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt = tvm.tir.ir_pass.Simplify(stmt) assert", "A = ib.pointer(\"float32\", name=\"A\") C = ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx, \"thread_extent\",", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "limitations # under the License. import tvm from tvm import", "= tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count = [0] def count_sync(op): if isinstance(op, tvm.tir.Call)", "= ib.pointer(\"float32\", name=\"A\") C = ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx, \"thread_extent\", 1)", "assert stmt.body.body.extents[0].value == 2 mod = tvm.IRModule({ \"db\" : tvm.tir.PrimFunc([A.asobject(),", "tvm from tvm import te def test_double_buffer(): dtype = 'int64'", "stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt = tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body, tvm.tir.Allocate)", "j: C[j] = B[j] + 1 stmt = ib.get() stmt", "may not use this file except in compliance # with", "stmt.body.body.extents[0].value == 2 mod = tvm.IRModule({ \"db\" : tvm.tir.PrimFunc([A.asobject(), C.asobject()],", "software distributed under the License is distributed on an #", "import te def test_double_buffer(): dtype = 'int64' n = 100", "Licensed to the Apache Software Foundation (ASF) under one #", "for additional information # regarding copyright ownership. The ASF licenses", "= 100 m = 4 tx = te.thread_axis(\"threadIdx.x\") ib =", "the Apache Software Foundation (ASF) under one # or more", "ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1) with ib.for_range(0, m) as j: B[j] =", "# # Unless required by applicable law or agreed to", "f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count = [0] def count_sync(op): if isinstance(op,", "Version 2.0 (the # \"License\"); you may not use this", "as j: B[j] = A[i * 4 + j] with", "under one # or more contributor license agreements. 
See the", "= A[i * 4 + j] with ib.for_range(0, m) as", "one # or more contributor license agreements. See the NOTICE", "count_sync) assert count[0] == 4 if __name__ == \"__main__\": test_double_buffer()", "License, Version 2.0 (the # \"License\"); you may not use", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "either express or implied. See the License for the #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "with ib.for_range(0, n) as i: B = ib.allocate(\"float32\", m, name=\"B\",", "if isinstance(op, tvm.tir.Call) and op.name == \"tvm_storage_sync\": count[0] += 1", "tx = te.thread_axis(\"threadIdx.x\") ib = tvm.tir.ir_builder.create() A = ib.pointer(\"float32\", name=\"A\")", "'int64' n = 100 m = 4 tx = te.thread_axis(\"threadIdx.x\")", "KIND, either express or implied. See the License for the", "B = ib.allocate(\"float32\", m, name=\"B\", scope=\"shared\") with ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\",", "information # regarding copyright ownership. The ASF licenses this file", "= B[j] + 1 stmt = ib.get() stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt,", "ib.scope_attr(tx, \"thread_extent\", 1) with ib.for_range(0, n) as i: B =", "the Apache License, Version 2.0 (the # \"License\"); you may", "tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count = [0] def count_sync(op): if isinstance(op, tvm.tir.Call) and", "isinstance(op, tvm.tir.Call) and op.name == \"tvm_storage_sync\": count[0] += 1 tvm.tir.ir_pass.PostOrderVisit(f.body,", "except in compliance # with the License. You may obtain", "additional information # regarding copyright ownership. 
The ASF licenses this", "te def test_double_buffer(): dtype = 'int64' n = 100 m", "mod = tvm.IRModule({ \"db\" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt) }) f", "}) f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count = [0] def count_sync(op): if", "you under the Apache License, Version 2.0 (the # \"License\");", "[0] def count_sync(op): if isinstance(op, tvm.tir.Call) and op.name == \"tvm_storage_sync\":", "tvm.tir.Allocate) assert stmt.body.body.extents[0].value == 2 mod = tvm.IRModule({ \"db\" :", "or implied. See the License for the # specific language", "ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx, \"thread_extent\", 1) with ib.for_range(0, n) as i:", "See the NOTICE file # distributed with this work for", "# KIND, either express or implied. See the License for", "express or implied. See the License for the # specific", "as j: C[j] = B[j] + 1 stmt = ib.get()", "\"tvm_storage_sync\": count[0] += 1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert count[0] == 4", "NOTICE file # distributed with this work for additional information", "test_double_buffer(): dtype = 'int64' n = 100 m = 4", "import tvm from tvm import te def test_double_buffer(): dtype =", "= tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body, tvm.tir.Allocate) assert stmt.body.body.extents[0].value == 2 mod", "stmt) }) f = tvm.tir.transform.ThreadSync(\"shared\")(mod)[\"db\"] count = [0] def count_sync(op):", "this file except in compliance # with the License. You", "ib.for_range(0, m) as j: B[j] = A[i * 4 +", "= te.thread_axis(\"threadIdx.x\") ib = tvm.tir.ir_builder.create() A = ib.pointer(\"float32\", name=\"A\") C", "1) with ib.for_range(0, m) as j: B[j] = A[i *", "agreements. See the NOTICE file # distributed with this work", "language governing permissions and limitations # under the License. 
import", "count_sync(op): if isinstance(op, tvm.tir.Call) and op.name == \"tvm_storage_sync\": count[0] +=", "Apache License, Version 2.0 (the # \"License\"); you may not", "\"double_buffer_scope\", 1) with ib.for_range(0, m) as j: B[j] = A[i", "count[0] += 1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert count[0] == 4 if", "the # specific language governing permissions and limitations # under", "licenses this file # to you under the Apache License,", "with ib.for_range(0, m) as j: C[j] = B[j] + 1", "stmt = ib.get() stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt = tvm.tir.ir_pass.Simplify(stmt)", "= [0] def count_sync(op): if isinstance(op, tvm.tir.Call) and op.name ==", "license agreements. See the NOTICE file # distributed with this", "4 + j] with ib.for_range(0, m) as j: C[j] =", "name=\"B\", scope=\"shared\") with ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1) with ib.for_range(0, m)", "required by applicable law or agreed to in writing, #", "by applicable law or agreed to in writing, # software", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "* 4 + j] with ib.for_range(0, m) as j: C[j]", "== \"tvm_storage_sync\": count[0] += 1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert count[0] ==", "dtype = 'int64' n = 100 m = 4 tx", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert count[0] == 4 if __name__ ==", "The ASF licenses this file # to you under the", "<filename>tests/python/unittest/test_tir_pass_inject_double_buffer.py # Licensed to the Apache Software Foundation (ASF) under", "file except in compliance # with the License. 
You may", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "i: B = ib.allocate(\"float32\", m, name=\"B\", scope=\"shared\") with ib.new_scope(): ib.scope_attr(B.asobject(),", "# specific language governing permissions and limitations # under the", "the License for the # specific language governing permissions and", "count = [0] def count_sync(op): if isinstance(op, tvm.tir.Call) and op.name", "License. You may obtain a copy of the License at", "== 2 mod = tvm.IRModule({ \"db\" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt)", "tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body, tvm.tir.Allocate) assert stmt.body.body.extents[0].value == 2 mod =", "You may obtain a copy of the License at #", "ANY # KIND, either express or implied. See the License", "# Licensed to the Apache Software Foundation (ASF) under one", "100 m = 4 tx = te.thread_axis(\"threadIdx.x\") ib = tvm.tir.ir_builder.create()", "the License is distributed on an # \"AS IS\" BASIS,", "you may not use this file except in compliance #", "= 4 tx = te.thread_axis(\"threadIdx.x\") ib = tvm.tir.ir_builder.create() A =", "def count_sync(op): if isinstance(op, tvm.tir.Call) and op.name == \"tvm_storage_sync\": count[0]", "use this file except in compliance # with the License.", "compliance # with the License. You may obtain a copy", "permissions and limitations # under the License. import tvm from", "C = ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx, \"thread_extent\", 1) with ib.for_range(0, n)", "= ib.pointer(\"float32\", name=\"C\") ib.scope_attr(tx, \"thread_extent\", 1) with ib.for_range(0, n) as", "ib.get() stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2) stmt = tvm.tir.ir_pass.Simplify(stmt) assert isinstance(stmt.body.body,", "isinstance(stmt.body.body, tvm.tir.Allocate) assert stmt.body.body.extents[0].value == 2 mod = tvm.IRModule({ \"db\"", "law or agreed to in writing, # software distributed under", "contributor license agreements. 
See the NOTICE file # distributed with", "Foundation (ASF) under one # or more contributor license agreements.", "m) as j: B[j] = A[i * 4 + j]", "op.name == \"tvm_storage_sync\": count[0] += 1 tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync) assert count[0]", "not use this file except in compliance # with the", "scope=\"shared\") with ib.new_scope(): ib.scope_attr(B.asobject(), \"double_buffer_scope\", 1) with ib.for_range(0, m) as" ]
[ "with pressure #data r=requests.post('http://127.0.0.1:8080/function/sensor',data) print(r.text) sense=SenseHat() sense.show_message(r.text) except KeyboardInterrupt: sys.exit()", "smbus import time while True: try: pressure=0 sense = SenseHat()", "data = {'pressure':pressure} print(pressure) #send http request to sense serverless", "sense serverless function with pressure #data r=requests.post('http://127.0.0.1:8080/function/sensor',data) print(r.text) sense=SenseHat() sense.show_message(r.text)", "import requests from sense_hat import SenseHat import smbus import time", "import SenseHat import smbus import time while True: try: pressure=0", "sense = SenseHat() pressure = sense.get_pressure() data = {'pressure':pressure} print(pressure)", "http request to sense serverless function with pressure #data r=requests.post('http://127.0.0.1:8080/function/sensor',data)", "= sense.get_pressure() data = {'pressure':pressure} print(pressure) #send http request to", "to sense serverless function with pressure #data r=requests.post('http://127.0.0.1:8080/function/sensor',data) print(r.text) sense=SenseHat()", "= {'pressure':pressure} print(pressure) #send http request to sense serverless function", "while True: try: pressure=0 sense = SenseHat() pressure = sense.get_pressure()", "request to sense serverless function with pressure #data r=requests.post('http://127.0.0.1:8080/function/sensor',data) print(r.text)", "requests from sense_hat import SenseHat import smbus import time while", "sense_hat import SenseHat import smbus import time while True: try:", "True: try: pressure=0 sense = SenseHat() pressure = sense.get_pressure() data", "SenseHat() pressure = sense.get_pressure() data = {'pressure':pressure} print(pressure) #send http", "pressure = sense.get_pressure() data = {'pressure':pressure} print(pressure) #send http request", "from sense_hat import SenseHat import smbus import time while True:", "import smbus import time while True: try: pressure=0 sense =", 
"{'pressure':pressure} print(pressure) #send http request to sense serverless function with", "try: pressure=0 sense = SenseHat() pressure = sense.get_pressure() data =", "sense.get_pressure() data = {'pressure':pressure} print(pressure) #send http request to sense", "serverless function with pressure #data r=requests.post('http://127.0.0.1:8080/function/sensor',data) print(r.text) sense=SenseHat() sense.show_message(r.text) except", "#send http request to sense serverless function with pressure #data", "time while True: try: pressure=0 sense = SenseHat() pressure =", "SenseHat import smbus import time while True: try: pressure=0 sense", "pressure=0 sense = SenseHat() pressure = sense.get_pressure() data = {'pressure':pressure}", "import time while True: try: pressure=0 sense = SenseHat() pressure", "function with pressure #data r=requests.post('http://127.0.0.1:8080/function/sensor',data) print(r.text) sense=SenseHat() sense.show_message(r.text) except KeyboardInterrupt:", "= SenseHat() pressure = sense.get_pressure() data = {'pressure':pressure} print(pressure) #send", "print(pressure) #send http request to sense serverless function with pressure" ]
[ "v ''' invader = r''' /XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX XX XX", "# Steering. vel = keydir()*50 + tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) # Only", "keys() or 'LCtrl' in keys() or is_tap_close if is_shooting and", "ASCII geometries. shipascii = r''' /\\ /XXXXXXXX\\ v v '''", "/XXXXXXXX\\ XXXXXXXXXX XX XX XX \\XXXXXXXX/ /XX XX\\ /X/ \\/", "make explosions. for o in collided_objects(): if o in invaders:", "shipascii = r''' /\\ /XXXXXXXX\\ v v ''' invader =", "(0,0,-10), (-25,0,0), (0,0,-10)],0 invaders = set() for y in range(2):", "# Run invaders. if timeout(3, timer='invaders'): isi = (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi])", "[(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0 invaders = set() for y in", "# Check collisions, make explosions. for o in collided_objects(): if", "col='#fff')] sound(sound_bang, shots[-1].pos()) # Run invaders. if timeout(3, timer='invaders'): isi", "[i.vel(invaderspeeds[isi]) for i in invaders] # Check collisions, make explosions.", "o in collided_objects(): if o in invaders: invaders.remove(o) explode(o.pos(),o.vel(),5) elif", "3 is_shooting = 'Space' in keys() or 'LCtrl' in keys()", "from trabant import * # ASCII geometries. shipascii = r'''", "r''' /XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX XX XX XX \\XXXXXXXX/ /XX XX\\", "invaders = set() for y in range(2): for x in", "X. # Shooting. is_tap_close = taps() and tapdir(ship.pos()).x < 3", "XXXXXXXXXX XX XX XX \\XXXXXXXX/ /XX XX\\ /X/ \\/ \\X\\", "invader = r''' /XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX XX XX XX \\XXXXXXXX/", "is_shooting = 'Space' in keys() or 'LCtrl' in keys() or", "shots[-1].pos()) # Run invaders. 
if timeout(3, timer='invaders'): isi = (isi+1)%len(invaderspeeds)", "\\X ''' cam(distance=250) gravity((0,0,0)) ship = create_ascii_object(shipascii, pos=(0,0,-100), col='#070') shots", "in keys() or 'LCtrl' in keys() or is_tap_close if is_shooting", "invaders.remove(o) explode(o.pos(),o.vel(),5) elif o == ship: while loop(): pass o.release()", "taps() and tapdir(ship.pos()).x < 3 is_shooting = 'Space' in keys()", "keys() or is_tap_close if is_shooting and timeout(0.7, first_hit=True): shots +=", "in invaders] # Check collisions, make explosions. for o in", "for invader in invaders: invader.vel(invaderspeeds[0]) while loop(): # Steering. vel", "Invadersishkebab. from trabant import * # ASCII geometries. shipascii =", "''' invader = r''' /XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX XX XX XX", "for y in range(2): for x in range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20),", "= keydir()*50 + tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) # Only move in X.", "= (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for i in invaders] # Check collisions,", "vel = keydir()*50 + tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) # Only move in", "pos=(0,0,-100), col='#070') shots = [] invaderspeeds,isi = [(25,0,0), (0,0,-10), (-25,0,0),", "physmesh=True)) for invader in invaders: invader.vel(invaderspeeds[0]) while loop(): # Steering.", "Shooting. 
is_tap_close = taps() and tapdir(ship.pos()).x < 3 is_shooting =", "invaders: invaders.remove(o) explode(o.pos(),o.vel(),5) elif o == ship: while loop(): pass", "create_ascii_object(shipascii, pos=(0,0,-100), col='#070') shots = [] invaderspeeds,isi = [(25,0,0), (0,0,-10),", "in collided_objects(): if o in invaders: invaders.remove(o) explode(o.pos(),o.vel(),5) elif o", "[] invaderspeeds,isi = [(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0 invaders = set()", "for o in collided_objects(): if o in invaders: invaders.remove(o) explode(o.pos(),o.vel(),5)", "invaderspeeds,isi = [(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0 invaders = set() for", "tapdir(ship.pos()).x < 3 is_shooting = 'Space' in keys() or 'LCtrl'", "\\X\\ X/ \\X ''' cam(distance=250) gravity((0,0,0)) ship = create_ascii_object(shipascii, pos=(0,0,-100),", "geometries. shipascii = r''' /\\ /XXXXXXXX\\ v v ''' invader", "or 'LCtrl' in keys() or is_tap_close if is_shooting and timeout(0.7,", "and tapdir(ship.pos()).x < 3 is_shooting = 'Space' in keys() or", "shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')] sound(sound_bang, shots[-1].pos()) # Run invaders.", "Check collisions, make explosions. 
for o in collided_objects(): if o", "in invaders: invaders.remove(o) explode(o.pos(),o.vel(),5) elif o == ship: while loop():", "invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True)) for invader in invaders: invader.vel(invaderspeeds[0]) while", "in keys() or is_tap_close if is_shooting and timeout(0.7, first_hit=True): shots", "(0,0,-10)],0 invaders = set() for y in range(2): for x", "in range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True)) for invader in invaders:", "pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True)) for invader in invaders: invader.vel(invaderspeeds[0]) while loop():", "if is_shooting and timeout(0.7, first_hit=True): shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')]", "i in invaders] # Check collisions, make explosions. for o", "in invaders: invader.vel(invaderspeeds[0]) while loop(): # Steering. vel = keydir()*50", "keydir()*50 + tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) # Only move in X. #", "while loop(): # Steering. vel = keydir()*50 + tapdir(ship.pos())*4 ship.vel((vel.x,0,0))", "loop(): # Steering. vel = keydir()*50 + tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) #", "# Shooting. is_tap_close = taps() and tapdir(ship.pos()).x < 3 is_shooting", "y in range(2): for x in range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(),", "XX XX \\XXXXXXXX/ /XX XX\\ /X/ \\/ \\X\\ X/ \\X", "invaders: invader.vel(invaderspeeds[0]) while loop(): # Steering. vel = keydir()*50 +", "is_tap_close if is_shooting and timeout(0.7, first_hit=True): shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200),", "+= [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')] sound(sound_bang, shots[-1].pos()) # Run invaders. if", "in X. # Shooting. is_tap_close = taps() and tapdir(ship.pos()).x <", "# ASCII geometries. 
shipascii = r''' /\\ /XXXXXXXX\\ v v", "(isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for i in invaders] # Check collisions, make", "for i in invaders] # Check collisions, make explosions. for", "is_tap_close = taps() and tapdir(ship.pos()).x < 3 is_shooting = 'Space'", "/XXXXXXXX\\ v v ''' invader = r''' /XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX", "r''' /\\ /XXXXXXXX\\ v v ''' invader = r''' /XXXXXX\\", "[create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')] sound(sound_bang, shots[-1].pos()) # Run invaders. if timeout(3,", "range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True)) for invader in invaders: invader.vel(invaderspeeds[0])", "for x in range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True)) for invader", "/XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX XX XX XX \\XXXXXXXX/ /XX XX\\ /X/", "shots = [] invaderspeeds,isi = [(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0 invaders", "# Space Invadersishkebab. from trabant import * # ASCII geometries.", "''' cam(distance=250) gravity((0,0,0)) ship = create_ascii_object(shipascii, pos=(0,0,-100), col='#070') shots =", "Steering. vel = keydir()*50 + tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) # Only move", "+ tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) # Only move in X. # Shooting.", "/X/ \\/ \\X\\ X/ \\X ''' cam(distance=250) gravity((0,0,0)) ship =", "Run invaders. if timeout(3, timer='invaders'): isi = (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for", "invaders] # Check collisions, make explosions. for o in collided_objects():", "cam(distance=250) gravity((0,0,0)) ship = create_ascii_object(shipascii, pos=(0,0,-100), col='#070') shots = []", "or is_tap_close if is_shooting and timeout(0.7, first_hit=True): shots += [create_sphere(ship.pos()+vec3(0,0,10),", "ship.vel((vel.x,0,0)) # Only move in X. # Shooting. 
is_tap_close =", "if timeout(3, timer='invaders'): isi = (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for i in", "trabant import * # ASCII geometries. shipascii = r''' /\\", "/\\ /XXXXXXXX\\ v v ''' invader = r''' /XXXXXX\\ /XXXXXXXX\\", "collided_objects(): if o in invaders: invaders.remove(o) explode(o.pos(),o.vel(),5) elif o ==", "Only move in X. # Shooting. is_tap_close = taps() and", "= taps() and tapdir(ship.pos()).x < 3 is_shooting = 'Space' in", "gravity((0,0,0)) ship = create_ascii_object(shipascii, pos=(0,0,-100), col='#070') shots = [] invaderspeeds,isi", "tapdir(ship.pos())*4 ship.vel((vel.x,0,0)) # Only move in X. # Shooting. is_tap_close", "first_hit=True): shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')] sound(sound_bang, shots[-1].pos()) # Run", "/XX XX\\ /X/ \\/ \\X\\ X/ \\X ''' cam(distance=250) gravity((0,0,0))", "col='#070') shots = [] invaderspeeds,isi = [(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0", "invaders. if timeout(3, timer='invaders'): isi = (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for i", "= [] invaderspeeds,isi = [(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0 invaders =", "col=rndvec().abs(), physmesh=True)) for invader in invaders: invader.vel(invaderspeeds[0]) while loop(): #", "* # ASCII geometries. shipascii = r''' /\\ /XXXXXXXX\\ v", "XX \\XXXXXXXX/ /XX XX\\ /X/ \\/ \\X\\ X/ \\X '''", "X/ \\X ''' cam(distance=250) gravity((0,0,0)) ship = create_ascii_object(shipascii, pos=(0,0,-100), col='#070')", "in range(2): for x in range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True))", "vel=(0,0,200), col='#fff')] sound(sound_bang, shots[-1].pos()) # Run invaders. 
if timeout(3, timer='invaders'):", "ship = create_ascii_object(shipascii, pos=(0,0,-100), col='#070') shots = [] invaderspeeds,isi =", "= 'Space' in keys() or 'LCtrl' in keys() or is_tap_close", "x in range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True)) for invader in", "v v ''' invader = r''' /XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX XX", "timeout(3, timer='invaders'): isi = (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for i in invaders]", "python3 # Space Invadersishkebab. from trabant import * # ASCII", "timeout(0.7, first_hit=True): shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')] sound(sound_bang, shots[-1].pos()) #", "XX\\ /X/ \\/ \\X\\ X/ \\X ''' cam(distance=250) gravity((0,0,0)) ship", "'Space' in keys() or 'LCtrl' in keys() or is_tap_close if", "and timeout(0.7, first_hit=True): shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')] sound(sound_bang, shots[-1].pos())", "XX XX XX \\XXXXXXXX/ /XX XX\\ /X/ \\/ \\X\\ X/", "invader.vel(invaderspeeds[0]) while loop(): # Steering. vel = keydir()*50 + tapdir(ship.pos())*4", "= set() for y in range(2): for x in range(8):", "< 3 is_shooting = 'Space' in keys() or 'LCtrl' in", "invader in invaders: invader.vel(invaderspeeds[0]) while loop(): # Steering. vel =", "collisions, make explosions. for o in collided_objects(): if o in", "\\XXXXXXXX/ /XX XX\\ /X/ \\/ \\X\\ X/ \\X ''' cam(distance=250)", "explosions. for o in collided_objects(): if o in invaders: invaders.remove(o)", "if o in invaders: invaders.remove(o) explode(o.pos(),o.vel(),5) elif o == ship:", "= create_ascii_object(shipascii, pos=(0,0,-100), col='#070') shots = [] invaderspeeds,isi = [(25,0,0),", "Space Invadersishkebab. from trabant import * # ASCII geometries. 
shipascii", "(-25,0,0), (0,0,-10)],0 invaders = set() for y in range(2): for", "= r''' /XXXXXX\\ /XXXXXXXX\\ XXXXXXXXXX XX XX XX \\XXXXXXXX/ /XX", "timer='invaders'): isi = (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for i in invaders] #", "# Only move in X. # Shooting. is_tap_close = taps()", "isi = (isi+1)%len(invaderspeeds) [i.vel(invaderspeeds[isi]) for i in invaders] # Check", "#!/usr/bin/env python3 # Space Invadersishkebab. from trabant import * #", "move in X. # Shooting. is_tap_close = taps() and tapdir(ship.pos()).x", "range(2): for x in range(8): invaders.add(create_ascii_object(invader, pos=(x*25-130,0,100-y*20), col=rndvec().abs(), physmesh=True)) for", "= r''' /\\ /XXXXXXXX\\ v v ''' invader = r'''", "\\/ \\X\\ X/ \\X ''' cam(distance=250) gravity((0,0,0)) ship = create_ascii_object(shipascii,", "set() for y in range(2): for x in range(8): invaders.add(create_ascii_object(invader,", "is_shooting and timeout(0.7, first_hit=True): shots += [create_sphere(ship.pos()+vec3(0,0,10), vel=(0,0,200), col='#fff')] sound(sound_bang,", "'LCtrl' in keys() or is_tap_close if is_shooting and timeout(0.7, first_hit=True):", "sound(sound_bang, shots[-1].pos()) # Run invaders. if timeout(3, timer='invaders'): isi =", "o in invaders: invaders.remove(o) explode(o.pos(),o.vel(),5) elif o == ship: while", "import * # ASCII geometries. shipascii = r''' /\\ /XXXXXXXX\\", "= [(25,0,0), (0,0,-10), (-25,0,0), (0,0,-10)],0 invaders = set() for y" ]
[ "2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn5 = BatchNorm(2048) # Init", "torch.rand(1, 3, 512, 512) output, low_level_feat = model(input) print(output.size()) print(low_level_feat.size())", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block10 = Block(728, 728, reps=3, stride=1,", "v elif k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')] = v else: model_dict[k] =", "= self.block14(x) x = self.block15(x) x = self.block16(x) x =", "else: self.skip = None self.relu = nn.ReLU(inplace=True) rep = []", "x = self.pointwise(x) return x class Block(nn.Module): def __init__(self, inplanes,", "planes, 1, stride=stride, bias=False) self.skipbn = BatchNorm(planes) else: self.skip =", "start_with_relu=True, grow_first=True) self.block10 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "grow_first=True, is_last=False): super(Block, self).__init__() if planes != inplanes or stride", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block15", "planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters = planes for", "grow_first=True) # Exit flow self.block20 = Block(728, 1024, reps=2, stride=1,", "self.conv3(x) x = self.bn3(x) x = self.relu(x) x = self.conv4(x)", "m.weight.data.normal_(0, math.sqrt(2. 
/ n)) elif isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif", "v elif k.startswith('conv4'): model_dict[k.replace('conv4', 'conv5')] = v elif k.startswith('bn4'): model_dict[k.replace('bn4',", "model_dict[k] = v model_dict[k.replace('block11', 'block12')] = v model_dict[k.replace('block11', 'block13')] =", "if planes != inplanes or stride != 1: self.skip =", "return x class AlignedXception(nn.Module): \"\"\" Modified Alighed Xception \"\"\" def", "rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if not", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block8 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "reps=2, stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True) self.conv3 = SeparableConv2d(1024,", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block19 = Block(728, 728,", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block16 =", "grow_first=True) self.block15 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "return x, low_level_feat def _init_weight(self): for m in self.modules(): if", "reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True, is_last=True) # Middle flow self.block4", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block13 = Block(728, 728,", "grow_first=True) self.block18 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d def fixed_padding(inputs, kernel_size, dilation): 
kernel_size_effective = kernel_size", "super(Block, self).__init__() if planes != inplanes or stride != 1:", "= kernel_size + (kernel_size - 1) * (dilation - 1)", "1 and is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes))", "stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3 = BatchNorm(1536) self.conv4 = SeparableConv2d(1536, 1536,", "k: v = v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'): model_dict[k] = v model_dict[k.replace('block11',", "stride=1, padding=1, bias=False) self.bn2 = BatchNorm(64) self.block1 = Block(64, 128,", "x = self.block5(x) x = self.block6(x) x = self.block7(x) x", "self.block20 = Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True, grow_first=False,", "/ n)) elif isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d):", "exit_block_dilations = (2, 4) else: raise NotImplementedError # Entry flow", "= x + skip return x class AlignedXception(nn.Module): \"\"\" Modified", "1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if", "not None: skip = self.skip(inp) skip = self.skipbn(skip) else: skip", "= self.block13(x) x = self.block14(x) x = self.block15(x) x =", "= nn.ReLU(inplace=True) rep = [] filters = inplanes if grow_first:", "pad_total = kernel_size_effective - 1 pad_beg = pad_total // 2", "= self.block5(x) x = self.block6(x) x = self.block7(x) x =", "self.conv1(x) x = self.bn1(x) x = self.relu(x) x = self.conv2(x)", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block16 = Block(728, 728, reps=3, stride=1,", "dilation=self.conv1.dilation[0]) x = self.conv1(x) x = self.bn(x) x = self.pointwise(x)", "k.startswith('bn3'): model_dict[k] = v model_dict[k.replace('bn3', 
'bn4')] = v elif k.startswith('conv4'):", "m.bias.data.zero_() def _load_pretrained_model(self): pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict = {} state_dict", "= v model_dict[k.replace('block11', 'block19')] = v elif k.startswith('block12'): model_dict[k.replace('block12', 'block20')]", "= self.bn(x) x = self.pointwise(x) return x class Block(nn.Module): def", "= 1 middle_block_dilation = 2 exit_block_dilations = (2, 4) else:", "= self.conv3(x) x = self.bn3(x) x = self.relu(x) x =", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block13 = Block(728, 728, reps=3, stride=1,", "Block(nn.Module): def __init__(self, inplanes, planes, reps, stride=1, dilation=1, BatchNorm=None, start_with_relu=True,", "reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False, grow_first=True) self.block3 = Block(256, 728, reps=2,", "x = self.bn3(x) x = self.relu(x) x = self.conv4(x) x", "model_dict[k.replace('block11', 'block15')] = v model_dict[k.replace('block11', 'block16')] = v model_dict[k.replace('block11', 'block17')]", "model_dict[k.replace('bn4', 'bn5')] = v else: model_dict[k] = v state_dict.update(model_dict) self.load_state_dict(state_dict)", "# Middle flow self.block4 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "nn.Conv2d(3, 32, 3, stride=2, padding=1, bias=False) self.bn1 = BatchNorm(32) self.relu", "BatchNorm=BatchNorm) self.bn5 = BatchNorm(2048) # Init weights self._init_weight() # Load", "Load pretrained model if pretrained: self._load_pretrained_model() def forward(self, x): #", "elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _load_pretrained_model(self): pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth')", "BatchNorm(64) self.block1 = Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False) self.block2", "128, 
reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False) self.block2 = Block(128, 256, reps=2,", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block8 = Block(728, 728,", "model_dict = {} state_dict = self.state_dict() for k, v in", "= v elif k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')] = v else: model_dict[k]", "grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if", "256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False, grow_first=True) self.block3 = Block(256, 728,", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block19 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "def __init__(self, inplanes, planes, kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None): super(SeparableConv2d,", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block15 = Block(728, 728, reps=3, stride=1,", "0, 1, 1, bias=bias) def forward(self, x): x = fixed_padding(x,", "BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride != 1: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3,", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) # Exit flow self.block20 = Block(728, 1024,", "Exit flow x = self.block20(x) x = self.relu(x) x =", "= self.relu(x) x = self.block1(x) # add relu here x", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block10", "reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False) self.block2 = Block(128, 256, reps=2, stride=2,", "nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(32, 64, 3, stride=1, padding=1, bias=False) self.bn2", "def __init__(self, inplanes, planes, reps, stride=1, dilation=1, BatchNorm=None, start_with_relu=True, grow_first=True,", "= 
Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) #", "= rep[1:] self.rep = nn.Sequential(*rep) def forward(self, inp): x =", "self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0, dilation, groups=inplanes, bias=bias)", "= self.block3(x) # Middle flow x = self.block4(x) x =", "= nn.Conv2d(3, 32, 3, stride=2, padding=1, bias=False) self.bn1 = BatchNorm(32)", "output_stride == 8: entry_block3_stride = 1 middle_block_dilation = 2 exit_block_dilations", "rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride", "self.block11 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16) input = torch.rand(1, 3, 512, 512) output,", "m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _load_pretrained_model(self): pretrain_dict =", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block7 =", "grow_first=True) self.block12 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "pretrained=True, output_stride=16) input = torch.rand(1, 3, 512, 512) output, low_level_feat", "self.block7 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "= v model_dict[k.replace('block11', 'block18')] = v model_dict[k.replace('block11', 'block19')] = v", "(2, 4) else: raise NotImplementedError # Entry flow self.conv1 =", "- 1) pad_total = kernel_size_effective - 1 pad_beg = pad_total", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block16 = Block(728, 728, reps=3, stride=1, 
dilation=middle_block_dilation,", "= (1, 2) elif output_stride == 8: entry_block3_stride = 1", "self.block16 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "not start_with_relu: rep = rep[1:] self.rep = nn.Sequential(*rep) def forward(self,", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block6", "for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0]", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block9 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block5 = Block(728, 728,", "self.bn3 = BatchNorm(1536) self.conv4 = SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1],", "model_dict[k] = v model_dict[k.replace('bn3', 'bn4')] = v elif k.startswith('conv4'): model_dict[k.replace('conv4',", "= nn.Conv2d(32, 64, 3, stride=1, padding=1, bias=False) self.bn2 = BatchNorm(64)", "1 pad_beg = pad_total // 2 pad_end = pad_total -", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block8 = Block(728, 728, reps=3,", "self.bn4 = BatchNorm(1536) self.conv5 = SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1],", "v model_dict[k.replace('block11', 'block18')] = v model_dict[k.replace('block11', 'block19')] = v elif", "grow_first=True) self.block16 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "1024, reps=2, stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True) self.conv3 =", "grow_first=True) self.block19 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, 
start_with_relu=True,", "= self.rep(inp) if self.skip is not None: skip = self.skip(inp)", "v = v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'): model_dict[k] = v model_dict[k.replace('block11', 'block12')]", "self.relu(x) x = self.conv2(x) x = self.bn2(x) x = self.relu(x)", "x = self.block4(x) x = self.block5(x) x = self.block6(x) x", "= v model_dict[k.replace('block11', 'block17')] = v model_dict[k.replace('block11', 'block18')] = v", "3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn5 = BatchNorm(2048) # Init weights", "4) else: raise NotImplementedError # Entry flow self.conv1 = nn.Conv2d(3,", "import torch import torch.nn as nn import torch.nn.functional as F", "64, 3, stride=1, padding=1, bias=False) self.bn2 = BatchNorm(64) self.block1 =", "isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0,", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block5 = Block(728, 728, reps=3, stride=1,", "elif output_stride == 8: entry_block3_stride = 1 middle_block_dilation = 2", "= BatchNorm(32) self.relu = nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(32, 64, 3,", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block12 = Block(728, 728, reps=3, stride=1,", "self.block14(x) x = self.block15(x) x = self.block16(x) x = self.block17(x)", "- pad_beg padded_inputs = F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end)) return", "_init_weight(self): for m in self.modules(): if isinstance(m, nn.Conv2d): n =", "start_with_relu=True, grow_first=True) self.block17 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "= self.state_dict() for k, v in pretrain_dict.items(): if k in", "x = self.bn4(x) x = self.relu(x) x = self.conv5(x) x", "kernel_size, stride, 0, dilation, groups=inplanes, bias=bias) self.bn = BatchNorm(inplanes) self.pointwise", "dilation): 
kernel_size_effective = kernel_size + (kernel_size - 1) * (dilation", "self.block2 = Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False, grow_first=True) self.block3", "dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3,", "'block19')] = v elif k.startswith('block12'): model_dict[k.replace('block12', 'block20')] = v elif", "middle_block_dilation = 1 exit_block_dilations = (1, 2) elif output_stride ==", "kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None): super(SeparableConv2d, self).__init__() self.conv1 = nn.Conv2d(inplanes,", "if self.skip is not None: skip = self.skip(inp) skip =", "self.conv2 = nn.Conv2d(32, 64, 3, stride=1, padding=1, bias=False) self.bn2 =", "__init__(self, inplanes, planes, reps, stride=1, dilation=1, BatchNorm=None, start_with_relu=True, grow_first=True, is_last=False):", "k.startswith('block12'): model_dict[k.replace('block12', 'block20')] = v elif k.startswith('bn3'): model_dict[k] = v", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block6 = Block(728, 728,", "self._init_weight() # Load pretrained model if pretrained: self._load_pretrained_model() def forward(self,", "+ skip return x class AlignedXception(nn.Module): \"\"\" Modified Alighed Xception", "planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride != 1:", "grow_first=True) self.block17 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4 = BatchNorm(1536) self.conv5 =", "self.relu(x) return x, low_level_feat def _init_weight(self): for m in self.modules():", "in range(reps - 1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3, 1, dilation,", "def __init__(self, output_stride, 
BatchNorm, pretrained=True): super(AlignedXception, self).__init__() if output_stride ==", "stride=1, dilation=1, bias=False, BatchNorm=None): super(SeparableConv2d, self).__init__() self.conv1 = nn.Conv2d(inplanes, inplanes,", "self).__init__() self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0, dilation, groups=inplanes,", "dilation=1, bias=False, BatchNorm=None): super(SeparableConv2d, self).__init__() self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size,", "= torch.rand(1, 3, 512, 512) output, low_level_feat = model(input) print(output.size())", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block12 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "= self.conv1(x) x = self.bn(x) x = self.pointwise(x) return x", "= Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False) self.block2 = Block(128,", "= self.relu(x) x = self.conv4(x) x = self.bn4(x) x =", "class Block(nn.Module): def __init__(self, inplanes, planes, reps, stride=1, dilation=1, BatchNorm=None,", "entry_block3_stride = 2 middle_block_dilation = 1 exit_block_dilations = (1, 2)", "= self.block20(x) x = self.relu(x) x = self.conv3(x) x =", "model_dict[k.replace('conv4', 'conv5')] = v elif k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')] = v", "= self.block2(x) x = self.block3(x) # Middle flow x =", "= v state_dict.update(model_dict) self.load_state_dict(state_dict) if __name__ == \"__main__\": import torch", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block14 = Block(728, 728,", "forward(self, x): x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x = self.conv1(x)", "F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end)) return padded_inputs class SeparableConv2d(nn.Module): def", "self.block1 = Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False) self.block2 =", "dilation=exit_block_dilations[1], 
BatchNorm=BatchNorm) self.bn5 = BatchNorm(2048) # Init weights self._init_weight() #", "Exit flow self.block20 = Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm,", "= self.skipbn(skip) else: skip = inp x = x +", "start_with_relu=True, grow_first=True, is_last=True) # Middle flow self.block4 = Block(728, 728,", "groups=inplanes, bias=bias) self.bn = BatchNorm(inplanes) self.pointwise = nn.Conv2d(inplanes, planes, 1,", "x = self.relu(x) x = self.block1(x) # add relu here", "self.skip = nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False) self.skipbn = BatchNorm(planes)", "BatchNorm(planes) else: self.skip = None self.relu = nn.ReLU(inplace=True) rep =", "stride=stride, bias=False) self.skipbn = BatchNorm(planes) else: self.skip = None self.relu", "= nn.Conv2d(inplanes, planes, 1, 1, 0, 1, 1, bias=bias) def", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block16 = Block(728,", "3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4 = BatchNorm(1536) self.conv5 = SeparableConv2d(1536,", "- 1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters))", "stride != 1: self.skip = nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False)", "bias=False) self.bn1 = BatchNorm(32) self.relu = nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(32,", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block12 =", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block15 = Block(728,", "grow_first=True) self.block3 = Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True,", "SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, 
nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _load_pretrained_model(self):", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block11 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "= {} state_dict = self.state_dict() for k, v in pretrain_dict.items():", "x = x + skip return x class AlignedXception(nn.Module): \"\"\"", "self.block17(x) x = self.block18(x) x = self.block19(x) # Exit flow", "= Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True, is_last=True) #", "2 exit_block_dilations = (2, 4) else: raise NotImplementedError # Entry", "x = self.conv1(x) x = self.bn(x) x = self.pointwise(x) return", "Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False) self.block2 = Block(128, 256,", "= self.relu(x) x = self.conv3(x) x = self.bn3(x) x =", "math.sqrt(2. / n)) elif isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m,", "self.conv4(x) x = self.bn4(x) x = self.relu(x) x = self.conv5(x)", "pad_end = pad_total - pad_beg padded_inputs = F.pad(inputs, (pad_beg, pad_end,", "kernel_size_effective = kernel_size + (kernel_size - 1) * (dilation -", "rep.append(BatchNorm(planes)) filters = planes for i in range(reps - 1):", "Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False, grow_first=True) self.block3 = Block(256,", "x = self.relu(x) x = self.conv3(x) x = self.bn3(x) x", "self.relu(x) x = self.conv4(x) x = self.bn4(x) x = self.relu(x)", "self.bn5 = BatchNorm(2048) # Init weights self._init_weight() # Load pretrained", "NotImplementedError # Entry flow self.conv1 = nn.Conv2d(3, 32, 3, stride=2,", "if not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm))", "BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride == 1 and is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes,", 
"pad_end, pad_beg, pad_end)) return padded_inputs class SeparableConv2d(nn.Module): def __init__(self, inplanes,", "0, dilation, groups=inplanes, bias=bias) self.bn = BatchNorm(inplanes) self.pointwise = nn.Conv2d(inplanes,", "return x class Block(nn.Module): def __init__(self, inplanes, planes, reps, stride=1,", "!= 1: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if", "start_with_relu=True, grow_first=True) self.block6 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block8 = Block(728,", "= self.block6(x) x = self.block7(x) x = self.block8(x) x =", "self).__init__() if planes != inplanes or stride != 1: self.skip", "= v elif k.startswith('block12'): model_dict[k.replace('block12', 'block20')] = v elif k.startswith('bn3'):", "import torch model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16) input = torch.rand(1,", "fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x = self.conv1(x) x = self.bn(x) x", "if __name__ == \"__main__\": import torch model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True,", "m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _load_pretrained_model(self): pretrain_dict", "self.block7(x) x = self.block8(x) x = self.block9(x) x = self.block10(x)", "self.block10(x) x = self.block11(x) x = self.block12(x) x = self.block13(x)", "x = self.block11(x) x = self.block12(x) x = self.block13(x) x", "output_stride=16) input = torch.rand(1, 3, 512, 512) output, low_level_feat =", "BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1,", "BatchNorm=BatchNorm, start_with_relu=True, 
grow_first=True) self.block18 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block13 = Block(728,", "= F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end)) return padded_inputs class SeparableConv2d(nn.Module):", "== 8: entry_block3_stride = 1 middle_block_dilation = 2 exit_block_dilations =", "inplanes or stride != 1: self.skip = nn.Conv2d(inplanes, planes, 1,", "x = self.bn2(x) x = self.relu(x) x = self.block1(x) #", "pad_beg = pad_total // 2 pad_end = pad_total - pad_beg", "= self.bn3(x) x = self.relu(x) x = self.conv4(x) x =", "elif k.startswith('bn3'): model_dict[k] = v model_dict[k.replace('bn3', 'bn4')] = v elif", "planes, kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None): super(SeparableConv2d, self).__init__() self.conv1 =", "import SynchronizedBatchNorm2d def fixed_padding(inputs, kernel_size, dilation): kernel_size_effective = kernel_size +", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block18 =", "= self.block1(x) # add relu here x = self.relu(x) low_level_feat", "= v model_dict[k.replace('bn3', 'bn4')] = v elif k.startswith('conv4'): model_dict[k.replace('conv4', 'conv5')]", "grow_first=True) self.block14 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "= self.block4(x) x = self.block5(x) x = self.block6(x) x =", "= v model_dict[k.replace('block11', 'block15')] = v model_dict[k.replace('block11', 'block16')] = v", "'block16')] = v model_dict[k.replace('block11', 'block17')] = v model_dict[k.replace('block11', 'block18')] =", "start_with_relu: rep = rep[1:] self.rep = nn.Sequential(*rep) def forward(self, inp):", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True, is_last=True) # Middle flow self.block4 = Block(728,", "v elif 
k.startswith('block12'): model_dict[k.replace('block12', 'block20')] = v elif k.startswith('bn3'): model_dict[k]", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block15 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "BatchNorm=BatchNorm, start_with_relu=False) self.block2 = Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False,", "self.block17 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "if not start_with_relu: rep = rep[1:] self.rep = nn.Sequential(*rep) def", "is_last=True) # Middle flow self.block4 = Block(728, 728, reps=3, stride=1,", "+ (kernel_size - 1) * (dilation - 1) pad_total =", "if stride != 1: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm))", "grow_first=True) self.block5 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "self.relu(x) x = self.conv5(x) x = self.bn5(x) x = self.relu(x)", "= planes for i in range(reps - 1): rep.append(self.relu) rep.append(SeparableConv2d(filters,", "1, 1, bias=bias) def forward(self, x): x = fixed_padding(x, self.conv1.kernel_size[0],", "1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes,", "pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict = {} state_dict = self.state_dict() for", "1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3 = BatchNorm(1536) self.conv4 =", "self.block13(x) x = self.block14(x) x = self.block15(x) x = self.block16(x)", "'block12')] = v model_dict[k.replace('block11', 'block13')] = v model_dict[k.replace('block11', 'block14')] =", "F import torch.utils.model_zoo as model_zoo from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d def", 
"dilation, groups=inplanes, bias=bias) self.bn = BatchNorm(inplanes) self.pointwise = nn.Conv2d(inplanes, planes,", "Init weights self._init_weight() # Load pretrained model if pretrained: self._load_pretrained_model()", "state_dict = self.state_dict() for k, v in pretrain_dict.items(): if k", "x = self.block20(x) x = self.relu(x) x = self.conv3(x) x", "= self.relu(x) x = self.conv2(x) x = self.bn2(x) x =", "= self.skip(inp) skip = self.skipbn(skip) else: skip = inp x", "Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True, is_last=True) # Middle", "start_with_relu=True, grow_first=True) self.block12 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block14 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "= BatchNorm(2048) # Init weights self._init_weight() # Load pretrained model", "= v model_dict[k.replace('block11', 'block14')] = v model_dict[k.replace('block11', 'block15')] = v", "'block17')] = v model_dict[k.replace('block11', 'block18')] = v model_dict[k.replace('block11', 'block19')] =", "model_dict[k.replace('block12', 'block20')] = v elif k.startswith('bn3'): model_dict[k] = v model_dict[k.replace('bn3',", "BatchNorm=BatchNorm, start_with_relu=False, grow_first=True) self.block3 = Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm,", "rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride == 1", "= self.relu(x) x = self.conv5(x) x = self.bn5(x) x =", "x = self.relu(x) return x, low_level_feat def _init_weight(self): for m", "if k in model_dict: if 'pointwise' in k: v =", "BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters = planes for i in range(reps -", "x): x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x = self.conv1(x) x", "output_stride, BatchNorm, 
pretrained=True): super(AlignedXception, self).__init__() if output_stride == 16: entry_block3_stride", "- 1 pad_beg = pad_total // 2 pad_end = pad_total", "None: skip = self.skip(inp) skip = self.skipbn(skip) else: skip =", "start_with_relu=True, grow_first=True) self.block8 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block18 = Block(728, 728, reps=3,", "is not None: skip = self.skip(inp) skip = self.skipbn(skip) else:", "self.load_state_dict(state_dict) if __name__ == \"__main__\": import torch model = AlignedXception(BatchNorm=nn.BatchNorm2d,", "rep.append(BatchNorm(planes)) if not start_with_relu: rep = rep[1:] self.rep = nn.Sequential(*rep)", "else: raise NotImplementedError # Entry flow self.conv1 = nn.Conv2d(3, 32,", "pad_beg, pad_end)) return padded_inputs class SeparableConv2d(nn.Module): def __init__(self, inplanes, planes,", "x = self.block1(x) # add relu here x = self.relu(x)", "(1, 2) elif output_stride == 8: entry_block3_stride = 1 middle_block_dilation", "= m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/ n))", "= BatchNorm(64) self.block1 = Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False)", "self.skipbn(skip) else: skip = inp x = x + skip", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block14 = Block(728, 728, reps=3, stride=1,", "start_with_relu=True, grow_first=True) self.block19 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "in pretrain_dict.items(): if k in model_dict: if 'pointwise' in k:", "Modified Alighed Xception \"\"\" def __init__(self, output_stride, BatchNorm, pretrained=True): super(AlignedXception,", "Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True) self.conv3", "self.bn2(x) x = self.relu(x) x = self.block1(x) # add relu", "Middle flow x = self.block4(x) x = self.block5(x) x =", "x): # Entry flow x = self.conv1(x) x = self.bn1(x)", "x = self.block19(x) # Exit flow x = self.block20(x) x", "SeparableConv2d(nn.Module): def __init__(self, inplanes, planes, kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None):", "x = self.relu(x) x = self.conv4(x) x = self.bn4(x) x", "BatchNorm=None): super(SeparableConv2d, self).__init__() self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0,", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block13", "= self.conv1(x) x = self.bn1(x) x = self.relu(x) x =", "!= inplanes or stride != 1: self.skip = nn.Conv2d(inplanes, planes,", "if pretrained: self._load_pretrained_model() def forward(self, x): # Entry flow x", "nn.Conv2d(inplanes, planes, 1, 1, 0, 1, 1, bias=bias) def forward(self,", "* m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/ n)) elif isinstance(m,", "v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'): model_dict[k] = v model_dict[k.replace('block11', 'block12')] = v", "x = self.block15(x) x = self.block16(x) x = self.block17(x) x", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block18", "AlignedXception(nn.Module): \"\"\" Modified Alighed Xception \"\"\" def __init__(self, output_stride, BatchNorm,", "SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn5 = BatchNorm(2048) #", "= self.block11(x) x = self.block12(x) x = self.block13(x) x =", "else: model_dict[k] = v state_dict.update(model_dict) self.load_state_dict(state_dict) if __name__ == \"__main__\":", "kernel_size, dilation): kernel_size_effective = kernel_size + (kernel_size - 1) *", "__init__(self, inplanes, planes, kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None): super(SeparableConv2d, self).__init__()", "\"\"\" Modified Alighed Xception \"\"\" def __init__(self, output_stride, BatchNorm, pretrained=True):", "model_dict: if 'pointwise' in k: v = v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'):", "pad_total - pad_beg padded_inputs = F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end))", "super(AlignedXception, self).__init__() if output_stride == 16: entry_block3_stride = 2 middle_block_dilation", "2 middle_block_dilation = 1 exit_block_dilations = (1, 2) elif output_stride", "stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True) self.conv3 = SeparableConv2d(1024, 1536,", "x = self.bn1(x) x = self.relu(x) x = self.conv2(x) x", "model_dict[k.replace('block11', 'block18')] = v model_dict[k.replace('block11', 'block19')] = v elif k.startswith('block12'):", "nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0, dilation, groups=inplanes, bias=bias) self.bn =", "elif k.startswith('conv4'): 
model_dict[k.replace('conv4', 'conv5')] = v elif k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')]", "1: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block15 =", "v elif k.startswith('bn3'): model_dict[k] = v model_dict[k.replace('bn3', 'bn4')] = v", "from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d def fixed_padding(inputs, kernel_size, dilation): kernel_size_effective =", "start_with_relu=True, grow_first=True) self.block14 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "return padded_inputs class SeparableConv2d(nn.Module): def __init__(self, inplanes, planes, kernel_size=3, stride=1,", "rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if not start_with_relu:", "self.skipbn = BatchNorm(planes) else: self.skip = None self.relu = nn.ReLU(inplace=True)", "rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters = planes", "if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels", "m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/ n)) elif isinstance(m, SynchronizedBatchNorm2d):", "= self.block18(x) x = self.block19(x) # Exit flow x =", "inp): x = self.rep(inp) if self.skip is not None: skip", "self.block15 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "# Exit flow x = self.block20(x) x = self.relu(x) x", "grow_first=True) self.block11 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "Entry flow x = self.conv1(x) x = self.bn1(x) x =", "planes, reps, stride=1, dilation=1, BatchNorm=None, start_with_relu=True, grow_first=True, is_last=False): super(Block, self).__init__()", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block8 =", "for i in range(reps - 1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3,", "BatchNorm=BatchNorm) self.bn4 = BatchNorm(1536) self.conv5 = SeparableConv2d(1536, 2048, 3, stride=1,", "[] filters = inplanes if grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3,", "start_with_relu=True, grow_first=True) self.block13 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "planes, 1, 1, 0, 1, 1, bias=bias) def forward(self, x):", "v state_dict.update(model_dict) self.load_state_dict(state_dict) if __name__ == \"__main__\": import torch model", "dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True) self.conv3 = SeparableConv2d(1024, 1536, 3,", "self.rep(inp) if self.skip is not None: skip = self.skip(inp) skip", "3, 2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride == 1 and is_last:", "start_with_relu=True, grow_first=False, is_last=True) self.conv3 = SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1],", "inplanes, kernel_size, stride, 0, dilation, groups=inplanes, 
bias=bias) self.bn = BatchNorm(inplanes)", "self.block19 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "x = self.bn(x) x = self.pointwise(x) return x class Block(nn.Module):", "self.block1(x) # add relu here x = self.relu(x) low_level_feat =", "'block13')] = v model_dict[k.replace('block11', 'block14')] = v model_dict[k.replace('block11', 'block15')] =", "is_last=False): super(Block, self).__init__() if planes != inplanes or stride !=", "1, 0, 1, 1, bias=bias) def forward(self, x): x =", "not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes))", "\"__main__\": import torch model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16) input =", "state_dict.update(model_dict) self.load_state_dict(state_dict) if __name__ == \"__main__\": import torch model =", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block6 = Block(728, 728, reps=3,", "add relu here x = self.relu(x) low_level_feat = x x", "planes, 3, 1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if not start_with_relu: rep =", "x x = self.block2(x) x = self.block3(x) # Middle flow", "self.bn = BatchNorm(inplanes) self.pointwise = nn.Conv2d(inplanes, planes, 1, 1, 0,", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block8 = Block(728, 728, reps=3, stride=1,", "nn.Conv2d(32, 64, 3, stride=1, padding=1, bias=False) self.bn2 = BatchNorm(64) self.block1", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block12 = Block(728,", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block14 =", "self.block11(x) x = self.block12(x) x = self.block13(x) x = 
self.block14(x)", "x = self.block12(x) x = self.block13(x) x = self.block14(x) x", "pretrain_dict.items(): if k in model_dict: if 'pointwise' in k: v", "model_dict[k.replace('block11', 'block12')] = v model_dict[k.replace('block11', 'block13')] = v model_dict[k.replace('block11', 'block14')]", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block9 =", "v model_dict[k.replace('block11', 'block17')] = v model_dict[k.replace('block11', 'block18')] = v model_dict[k.replace('block11',", "padded_inputs class SeparableConv2d(nn.Module): def __init__(self, inplanes, planes, kernel_size=3, stride=1, dilation=1,", "v model_dict[k.replace('block11', 'block15')] = v model_dict[k.replace('block11', 'block16')] = v model_dict[k.replace('block11',", "# Entry flow self.conv1 = nn.Conv2d(3, 32, 3, stride=2, padding=1,", "3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes,", "= v model_dict[k.replace('block11', 'block12')] = v model_dict[k.replace('block11', 'block13')] = v", "x, low_level_feat def _init_weight(self): for m in self.modules(): if isinstance(m,", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True) self.conv3 = SeparableConv2d(1024, 1536, 3, stride=1,", "* m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/ n)) elif isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1)", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block5 =", "x class Block(nn.Module): def __init__(self, inplanes, planes, reps, stride=1, dilation=1,", "skip = self.skipbn(skip) else: skip = inp x = x", "'bn5')] = v else: model_dict[k] = v state_dict.update(model_dict) self.load_state_dict(state_dict) if", "self.skip(inp) skip = self.skipbn(skip) else: skip = inp x =", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block19 = Block(728, 728, reps=3,", "self.block4 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "relu here x = self.relu(x) low_level_feat = x x =", "self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x = self.conv1(x) x = self.bn(x) x =", "self.bn4(x) x = self.relu(x) x = self.conv5(x) x = self.bn5(x)", "= self.conv2(x) x = self.bn2(x) x = self.relu(x) x =", "k, v in pretrain_dict.items(): if k in model_dict: if 'pointwise'", "self.relu(x) x = self.conv3(x) x = self.bn3(x) x = self.relu(x)", "'bn4')] = v elif k.startswith('conv4'): model_dict[k.replace('conv4', 'conv5')] = v elif", "in model_dict: if 'pointwise' in k: v = v.unsqueeze(-1).unsqueeze(-1) if", "for k, v in pretrain_dict.items(): if k in model_dict: if", "grow_first=True) self.block10 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "start_with_relu=True, grow_first=True) self.block16 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block7 = Block(728, 728, reps=3, stride=1,", "raise NotImplementedError # Entry flow self.conv1 = nn.Conv2d(3, 32, 3,", "'block20')] = v elif 
k.startswith('bn3'): model_dict[k] = v model_dict[k.replace('bn3', 'bn4')]", "= v elif k.startswith('bn3'): model_dict[k] = v model_dict[k.replace('bn3', 'bn4')] =", "start_with_relu=True, grow_first=True) self.block7 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "x = self.block3(x) # Middle flow x = self.block4(x) x", "Xception \"\"\" def __init__(self, output_stride, BatchNorm, pretrained=True): super(AlignedXception, self).__init__() if", "= self.relu(x) return x, low_level_feat def _init_weight(self): for m in", "= AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16) input = torch.rand(1, 3, 512, 512)", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block14 = Block(728,", "fixed_padding(inputs, kernel_size, dilation): kernel_size_effective = kernel_size + (kernel_size - 1)", "= nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(32, 64, 3, stride=1, padding=1, bias=False)", "math import torch import torch.nn as nn import torch.nn.functional as", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block19", "elif k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')] = v else: model_dict[k] = v", "= self.conv5(x) x = self.bn5(x) x = self.relu(x) return x,", "= SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn5 = BatchNorm(2048)", "self.block8(x) x = self.block9(x) x = self.block10(x) x = self.block11(x)", "flow x = self.block4(x) x = self.block5(x) x = self.block6(x)", "self.bn1 = BatchNorm(32) self.relu = nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(32, 64,", "self.conv2(x) x = self.bn2(x) x = self.relu(x) x = self.block1(x)", "= BatchNorm(inplanes) self.pointwise = nn.Conv2d(inplanes, planes, 1, 1, 0, 1,", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, 
start_with_relu=True, grow_first=True) self.block7 = Block(728,", "= self.bn1(x) x = self.relu(x) x = self.conv2(x) x =", "flow x = self.block20(x) x = self.relu(x) x = self.conv3(x)", "flow x = self.conv1(x) x = self.bn1(x) x = self.relu(x)", "3, stride=1, padding=1, bias=False) self.bn2 = BatchNorm(64) self.block1 = Block(64,", "x = self.block2(x) x = self.block3(x) # Middle flow x", "= self.bn4(x) x = self.relu(x) x = self.conv5(x) x =", "BatchNorm, pretrained=True): super(AlignedXception, self).__init__() if output_stride == 16: entry_block3_stride =", "low_level_feat = x x = self.block2(x) x = self.block3(x) #", "def forward(self, x): x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x =", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block16 = Block(728, 728, reps=3,", "= self.block16(x) x = self.block17(x) x = self.block18(x) x =", "self.conv5(x) x = self.bn5(x) x = self.relu(x) return x, low_level_feat", "skip return x class AlignedXception(nn.Module): \"\"\" Modified Alighed Xception \"\"\"", "self.relu = nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(32, 64, 3, stride=1, padding=1,", "bias=False) self.skipbn = BatchNorm(planes) else: self.skip = None self.relu =", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block13 = Block(728, 728, reps=3,", "3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3 = BatchNorm(1536) self.conv4 = SeparableConv2d(1536,", "= self.block7(x) x = self.block8(x) x = self.block9(x) x =", "x = self.block10(x) x = self.block11(x) x = self.block12(x) x", "k.startswith('block11'): model_dict[k] = v model_dict[k.replace('block11', 'block12')] = v model_dict[k.replace('block11', 'block13')]", "= v model_dict[k.replace('block11', 'block13')] = v model_dict[k.replace('block11', 'block14')] = v", "model_dict[k.replace('block11', 'block16')] = v 
model_dict[k.replace('block11', 'block17')] = v model_dict[k.replace('block11', 'block18')]", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block7 = Block(728, 728,", "= self.bn5(x) x = self.relu(x) return x, low_level_feat def _init_weight(self):", "stride=2, padding=1, bias=False) self.bn1 = BatchNorm(32) self.relu = nn.ReLU(inplace=True) self.conv2", "x = self.bn5(x) x = self.relu(x) return x, low_level_feat def", "input = torch.rand(1, 3, 512, 512) output, low_level_feat = model(input)", "range(reps - 1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3, 1, dilation, BatchNorm=BatchNorm))", "x = self.relu(x) low_level_feat = x x = self.block2(x) x", "8: entry_block3_stride = 1 middle_block_dilation = 2 exit_block_dilations = (2,", "= nn.Sequential(*rep) def forward(self, inp): x = self.rep(inp) if self.skip", "= inplanes if grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation,", "Alighed Xception \"\"\" def __init__(self, output_stride, BatchNorm, pretrained=True): super(AlignedXception, self).__init__()", "k.startswith('conv4'): model_dict[k.replace('conv4', 'conv5')] = v elif k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')] =", "rep = [] filters = inplanes if grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes,", "self.conv5 = SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn5 =", "3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride != 1: rep.append(self.relu)", "x = self.block9(x) x = self.block10(x) x = self.block11(x) x", "1, bias=bias) def forward(self, x): x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0])", "torch.nn as nn import torch.nn.functional as F import torch.utils.model_zoo as", "self.block4(x) x = self.block5(x) x = self.block6(x) x = self.block7(x)", "stride=1, 
dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) # Exit flow self.block20 =", "exit_block_dilations = (1, 2) elif output_stride == 8: entry_block3_stride =", "grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters", "nn.ReLU(inplace=True) rep = [] filters = inplanes if grow_first: rep.append(self.relu)", "start_with_relu=True, grow_first=True) self.block5 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "= x x = self.block2(x) x = self.block3(x) # Middle", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block11", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block11 = Block(728, 728, reps=3, stride=1,", "= model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict = {} state_dict = self.state_dict() for k,", "pretrained=True): super(AlignedXception, self).__init__() if output_stride == 16: entry_block3_stride = 2", "kernel_size + (kernel_size - 1) * (dilation - 1) pad_total", "= pad_total - pad_beg padded_inputs = F.pad(inputs, (pad_beg, pad_end, pad_beg,", "as model_zoo from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d def fixed_padding(inputs, kernel_size, dilation):", "start_with_relu=True, grow_first=True) self.block9 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block18 = Block(728, 728,", "# Middle flow x = self.block4(x) x = self.block5(x) x", "1 exit_block_dilations = (1, 2) elif output_stride == 8: entry_block3_stride", "= self.pointwise(x) return x class Block(nn.Module): def __init__(self, inplanes, planes,", "pretrained 
model if pretrained: self._load_pretrained_model() def forward(self, x): # Entry", "here x = self.relu(x) low_level_feat = x x = self.block2(x)", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block9 = Block(728,", "model_dict[k.replace('block11', 'block19')] = v elif k.startswith('block12'): model_dict[k.replace('block12', 'block20')] = v", "self.skip = None self.relu = nn.ReLU(inplace=True) rep = [] filters", "= 2 exit_block_dilations = (2, 4) else: raise NotImplementedError #", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block5 = Block(728, 728, reps=3,", "start_with_relu=True, grow_first=True) self.block15 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "= self.block15(x) x = self.block16(x) x = self.block17(x) x =", "grow_first=True) self.block8 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "grow_first=True) self.block6 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "pad_total // 2 pad_end = pad_total - pad_beg padded_inputs =", "= Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True)", "start_with_relu=False) self.block2 = Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False, grow_first=True)", "== 1 and is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm))", "= nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0, dilation, groups=inplanes, bias=bias) self.bn", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block14 = Block(728, 728, reps=3,", "k in model_dict: if 'pointwise' in k: v = v.unsqueeze(-1).unsqueeze(-1)", "= (2, 4) else: 
raise NotImplementedError # Entry flow self.conv1", "self.block6(x) x = self.block7(x) x = self.block8(x) x = self.block9(x)", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block17 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "v in pretrain_dict.items(): if k in model_dict: if 'pointwise' in", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block16 = Block(728, 728,", "filters = inplanes if grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1,", "self.block5 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "\"\"\" def __init__(self, output_stride, BatchNorm, pretrained=True): super(AlignedXception, self).__init__() if output_stride", "self.pointwise = nn.Conv2d(inplanes, planes, 1, 1, 0, 1, 1, bias=bias)", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block5 = Block(728,", "SynchronizedBatchNorm2d def fixed_padding(inputs, kernel_size, dilation): kernel_size_effective = kernel_size + (kernel_size", "super(SeparableConv2d, self).__init__() self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0, dilation,", "= nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False) self.skipbn = BatchNorm(planes) else:", "2) elif output_stride == 8: entry_block3_stride = 1 middle_block_dilation =", "1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride != 1: rep.append(self.relu) rep.append(SeparableConv2d(planes,", "pad_beg padded_inputs = F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end)) return padded_inputs", "self.block10 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False) self.skipbn = BatchNorm(planes) else: 
self.skip", "= SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4 = BatchNorm(1536)", "stride, 0, dilation, groups=inplanes, bias=bias) self.bn = BatchNorm(inplanes) self.pointwise =", "self.block9 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "is_last=True) self.conv3 = SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3", "# Init weights self._init_weight() # Load pretrained model if pretrained:", "{} state_dict = self.state_dict() for k, v in pretrain_dict.items(): if", "BatchNorm(inplanes) self.pointwise = nn.Conv2d(inplanes, planes, 1, 1, 0, 1, 1,", "self.block6 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "as nn import torch.nn.functional as F import torch.utils.model_zoo as model_zoo", "torch.nn.functional as F import torch.utils.model_zoo as model_zoo from model.sync_batchnorm.batchnorm import", "3, stride=2, padding=1, bias=False) self.bn1 = BatchNorm(32) self.relu = nn.ReLU(inplace=True)", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block10 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "planes, 3, 2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride == 1 and", "v model_dict[k.replace('block11', 'block12')] = v model_dict[k.replace('block11', 'block13')] = v model_dict[k.replace('block11',", "k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')] = v else: model_dict[k] = v state_dict.update(model_dict)", "self.conv3 = SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3 =", "padded_inputs = F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end)) return padded_inputs class", "inplanes, planes, kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None): 
super(SeparableConv2d, self).__init__() self.conv1", "self.conv1(x) x = self.bn(x) x = self.pointwise(x) return x class", "BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if not start_with_relu: rep = rep[1:] self.rep =", "x = self.block6(x) x = self.block7(x) x = self.block8(x) x", "'block14')] = v model_dict[k.replace('block11', 'block15')] = v model_dict[k.replace('block11', 'block16')] =", "= v elif k.startswith('conv4'): model_dict[k.replace('conv4', 'conv5')] = v elif k.startswith('bn4'):", "3, 1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if not start_with_relu: rep = rep[1:]", "bias=False, BatchNorm=None): super(SeparableConv2d, self).__init__() self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size, stride,", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block7 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4 = BatchNorm(1536) self.conv5", "padding=1, bias=False) self.bn1 = BatchNorm(32) self.relu = nn.ReLU(inplace=True) self.conv2 =", "= self.block10(x) x = self.block11(x) x = self.block12(x) x =", "# Entry flow x = self.conv1(x) x = self.bn1(x) x", "dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4 = BatchNorm(1536) self.conv5 = SeparableConv2d(1536, 2048, 3,", "elif isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_()", "(kernel_size - 1) * (dilation - 1) pad_total = kernel_size_effective", "self.bn1(x) x = self.relu(x) x = self.conv2(x) x = self.bn2(x)", "x = self.conv5(x) x = self.bn5(x) x = self.relu(x) return", "= Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False, grow_first=True) self.block3 =", "grow_first=True) self.block13 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", 
"BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block6 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "self.block2(x) x = self.block3(x) # Middle flow x = self.block4(x)", "v else: model_dict[k] = v state_dict.update(model_dict) self.load_state_dict(state_dict) if __name__ ==", "m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. / n)) elif", "def _init_weight(self): for m in self.modules(): if isinstance(m, nn.Conv2d): n", "else: skip = inp x = x + skip return", "nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2.", "in k: v = v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'): model_dict[k] = v", "v model_dict[k.replace('block11', 'block14')] = v model_dict[k.replace('block11', 'block15')] = v model_dict[k.replace('block11',", "forward(self, inp): x = self.rep(inp) if self.skip is not None:", "2 pad_end = pad_total - pad_beg padded_inputs = F.pad(inputs, (pad_beg,", "m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] *", "self.relu = nn.ReLU(inplace=True) rep = [] filters = inplanes if", "x = self.relu(x) x = self.conv5(x) x = self.bn5(x) x", "i in range(reps - 1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters, 3, 1,", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block9 = Block(728, 728,", "or stride != 1: self.skip = nn.Conv2d(inplanes, planes, 1, stride=stride,", "forward(self, x): # Entry flow x = self.conv1(x) x =", "nn.Sequential(*rep) def forward(self, inp): x = self.rep(inp) if self.skip is", "'pointwise' in k: v = v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'): model_dict[k] =", "grow_first=True) self.block9 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "m.weight.data.fill_(1) m.bias.data.zero_() def _load_pretrained_model(self): 
pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict = {}", "x = self.block16(x) x = self.block17(x) x = self.block18(x) x", "if 'pointwise' in k: v = v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'): model_dict[k]", "= [] filters = inplanes if grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes,", "1, 1, 0, 1, 1, bias=bias) def forward(self, x): x", "= fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x = self.conv1(x) x = self.bn(x)", "_load_pretrained_model(self): pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict = {} state_dict = self.state_dict()", "model_dict[k] = v state_dict.update(model_dict) self.load_state_dict(state_dict) if __name__ == \"__main__\": import", "nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _load_pretrained_model(self): pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict =", "model if pretrained: self._load_pretrained_model() def forward(self, x): # Entry flow", "= self.conv4(x) x = self.bn4(x) x = self.relu(x) x =", "= self.block17(x) x = self.block18(x) x = self.block19(x) # Exit", "= BatchNorm(planes) else: self.skip = None self.relu = nn.ReLU(inplace=True) rep", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block5", "1) pad_total = kernel_size_effective - 1 pad_beg = pad_total //", "self.bn(x) x = self.pointwise(x) return x class Block(nn.Module): def __init__(self,", "middle_block_dilation = 2 exit_block_dilations = (2, 4) else: raise NotImplementedError", "self.block14 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "self._load_pretrained_model() def forward(self, x): # Entry flow x = self.conv1(x)", 
"inplanes if grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm))", "bias=bias) self.bn = BatchNorm(inplanes) self.pointwise = nn.Conv2d(inplanes, planes, 1, 1,", "self.conv1 = nn.Conv2d(3, 32, 3, stride=2, padding=1, bias=False) self.bn1 =", "model_dict[k.replace('block11', 'block13')] = v model_dict[k.replace('block11', 'block14')] = v model_dict[k.replace('block11', 'block15')]", "stride == 1 and is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 1,", "'conv5')] = v elif k.startswith('bn4'): model_dict[k.replace('bn4', 'bn5')] = v else:", "self.bn2 = BatchNorm(64) self.block1 = Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm,", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block9 = Block(728, 728, reps=3,", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block19 = Block(728,", "# Exit flow self.block20 = Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0],", "= BatchNorm(1536) self.conv4 = SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)", "1 middle_block_dilation = 2 exit_block_dilations = (2, 4) else: raise", "x = self.conv1(x) x = self.bn1(x) x = self.relu(x) x", "flow self.block4 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "self.block13 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "self.block19(x) # Exit flow x = self.block20(x) x = self.relu(x)", "1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if not start_with_relu: rep = rep[1:] self.rep", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block12 = Block(728, 728, reps=3,", "= Block(728, 728, reps=3, 
stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block17", "in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1]", "import torch.nn.functional as F import torch.utils.model_zoo as model_zoo from model.sync_batchnorm.batchnorm", "model_dict[k.replace('block11', 'block14')] = v model_dict[k.replace('block11', 'block15')] = v model_dict[k.replace('block11', 'block16')]", "is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if not", "grow_first=True, is_last=True) # Middle flow self.block4 = Block(728, 728, reps=3,", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) # Exit", "= self.bn2(x) x = self.relu(x) x = self.block1(x) # add", "x = self.block18(x) x = self.block19(x) # Exit flow x", "self.block12 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "None self.relu = nn.ReLU(inplace=True) rep = [] filters = inplanes", "model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict = {} state_dict = self.state_dict() for k, v", "model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16) input = torch.rand(1, 3, 512,", "= self.block9(x) x = self.block10(x) x = self.block11(x) x =", "3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters = planes for i", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block12 = Block(728, 728,", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) # Exit flow self.block20 = Block(728,", "// 2 pad_end = pad_total - pad_beg padded_inputs = F.pad(inputs,", "skip = self.skip(inp) skip = self.skipbn(skip) else: skip = inp", 
"dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters = planes for i in range(reps", "__name__ == \"__main__\": import torch model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16)", "= v model_dict[k.replace('block11', 'block16')] = v model_dict[k.replace('block11', 'block17')] = v", "weights self._init_weight() # Load pretrained model if pretrained: self._load_pretrained_model() def", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block15 = Block(728, 728, reps=3,", "= self.relu(x) low_level_feat = x x = self.block2(x) x =", "self.block9(x) x = self.block10(x) x = self.block11(x) x = self.block12(x)", "BatchNorm(32) self.relu = nn.ReLU(inplace=True) self.conv2 = nn.Conv2d(32, 64, 3, stride=1,", "# Load pretrained model if pretrained: self._load_pretrained_model() def forward(self, x):", "import torch.utils.model_zoo as model_zoo from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d def fixed_padding(inputs,", "= BatchNorm(1536) self.conv5 = SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block17 = Block(728, 728,", "rep.append(BatchNorm(filters)) if not grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation,", "entry_block3_stride = 1 middle_block_dilation = 2 exit_block_dilations = (2, 4)", "nn import torch.nn.functional as F import torch.utils.model_zoo as model_zoo from", "if k.startswith('block11'): model_dict[k] = v model_dict[k.replace('block11', 'block12')] = v model_dict[k.replace('block11',", "v model_dict[k.replace('block11', 'block16')] = v model_dict[k.replace('block11', 'block17')] = v model_dict[k.replace('block11',", "BatchNorm=BatchNorm) self.bn3 = BatchNorm(1536) self.conv4 = SeparableConv2d(1536, 1536, 3, stride=1,", "reps=3, 
stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block11 = Block(728, 728,", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block17 = Block(728,", "dilation=1, BatchNorm=None, start_with_relu=True, grow_first=True, is_last=False): super(Block, self).__init__() if planes !=", "self.block5(x) x = self.block6(x) x = self.block7(x) x = self.block8(x)", "x = self.block17(x) x = self.block18(x) x = self.block19(x) #", "isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _load_pretrained_model(self): pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict", "x = self.block13(x) x = self.block14(x) x = self.block15(x) x", "stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True, is_last=True) # Middle flow self.block4 =", "stride=2, BatchNorm=BatchNorm, start_with_relu=False, grow_first=True) self.block3 = Block(256, 728, reps=2, stride=entry_block3_stride,", "__init__(self, output_stride, BatchNorm, pretrained=True): super(AlignedXception, self).__init__() if output_stride == 16:", "planes != inplanes or stride != 1: self.skip = nn.Conv2d(inplanes,", "rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters =", "self.relu(x) low_level_feat = x x = self.block2(x) x = self.block3(x)", "start_with_relu=True, grow_first=True) self.block18 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "= 2 middle_block_dilation = 1 exit_block_dilations = (1, 2) elif", "rep.append(SeparableConv2d(filters, filters, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if not grow_first:", "= SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3 = 
BatchNorm(1536)", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block19 = Block(728, 728, reps=3, stride=1,", "= None self.relu = nn.ReLU(inplace=True) rep = [] filters =", "= self.block19(x) # Exit flow x = self.block20(x) x =", "n)) elif isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1)", "= pad_total // 2 pad_end = pad_total - pad_beg padded_inputs", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) # Exit flow", "self.block20(x) x = self.relu(x) x = self.conv3(x) x = self.bn3(x)", "dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3 = BatchNorm(1536) self.conv4 = SeparableConv2d(1536, 1536, 3,", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block6 = Block(728,", "1: self.skip = nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False) self.skipbn =", "if stride == 1 and is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3,", "self.conv4 = SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4 =", "stride != 1: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes))", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block10 =", "x = self.conv3(x) x = self.bn3(x) x = self.relu(x) x", "self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] *", "flow self.block20 = Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0], BatchNorm=BatchNorm, start_with_relu=True,", "self.block15(x) x = self.block16(x) x = self.block17(x) x = self.block18(x)", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block5 
= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "model_dict[k.replace('bn3', 'bn4')] = v elif k.startswith('conv4'): model_dict[k.replace('conv4', 'conv5')] = v", "x = self.conv2(x) x = self.bn2(x) x = self.relu(x) x", "self.block16(x) x = self.block17(x) x = self.block18(x) x = self.block19(x)", "v model_dict[k.replace('bn3', 'bn4')] = v elif k.startswith('conv4'): model_dict[k.replace('conv4', 'conv5')] =", "x = self.block7(x) x = self.block8(x) x = self.block9(x) x", "self.skip is not None: skip = self.skip(inp) skip = self.skipbn(skip)", "self.state_dict() for k, v in pretrain_dict.items(): if k in model_dict:", "filters = planes for i in range(reps - 1): rep.append(self.relu)", "1) * (dilation - 1) pad_total = kernel_size_effective - 1", "skip = inp x = x + skip return x", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block9", "n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block6 =", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block9 = Block(728, 728, reps=3, stride=1,", "16: entry_block3_stride = 2 middle_block_dilation = 1 exit_block_dilations = (1,", "torch model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16) input = torch.rand(1, 3,", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) # Exit flow self.block20", "BatchNorm(2048) # Init weights self._init_weight() # Load pretrained model if", "rep = rep[1:] self.rep = nn.Sequential(*rep) def forward(self, inp): x", "kernel_size_effective - 1 pad_beg = pad_total // 2 pad_end =", "planes for i in range(reps - 1): rep.append(self.relu) rep.append(SeparableConv2d(filters, filters,", "x = self.relu(x) x = self.conv2(x) x = self.bn2(x) x", "BatchNorm=None, start_with_relu=True, grow_first=True, is_last=False): super(Block, self).__init__() if planes != inplanes", "== \"__main__\": import torch model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16) input", "= v else: model_dict[k] = v state_dict.update(model_dict) self.load_state_dict(state_dict) if __name__", "x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x = self.conv1(x) x =", "class AlignedXception(nn.Module): \"\"\" Modified Alighed Xception \"\"\" def __init__(self, output_stride,", "self.block8 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block14", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) 
self.block16", "32, 3, stride=2, padding=1, bias=False) self.bn1 = BatchNorm(32) self.relu =", "stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn5 = BatchNorm(2048) # Init weights self._init_weight()", "grow_first=False, is_last=True) self.conv3 = SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)", "x = self.conv4(x) x = self.bn4(x) x = self.relu(x) x", "v model_dict[k.replace('block11', 'block19')] = v elif k.startswith('block12'): model_dict[k.replace('block12', 'block20')] =", "BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block13 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,", "inplanes, planes, reps, stride=1, dilation=1, BatchNorm=None, start_with_relu=True, grow_first=True, is_last=False): super(Block,", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block17 = Block(728, 728, reps=3,", "bias=bias) def forward(self, x): x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0]) x", "isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def", "self.relu(x) x = self.block1(x) # add relu here x =", "low_level_feat def _init_weight(self): for m in self.modules(): if isinstance(m, nn.Conv2d):", "start_with_relu=True, grow_first=True) # Exit flow self.block20 = Block(728, 1024, reps=2,", "BatchNorm(1536) self.conv5 = SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn5", "* (dilation - 1) pad_total = kernel_size_effective - 1 pad_beg", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block11 = Block(728,", "m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/ n)) elif isinstance(m, SynchronizedBatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_()", "BatchNorm(1536) self.conv4 = SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4", "reps, stride=1, dilation=1, BatchNorm=None, start_with_relu=True, grow_first=True, is_last=False): super(Block, self).__init__() if", "1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) filters = planes for i in", "filters, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(filters)) if not grow_first: rep.append(self.relu)", "x + skip return x class AlignedXception(nn.Module): \"\"\" Modified Alighed", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block19 =", "self.block18 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)", "self.block12(x) x = self.block13(x) x = self.block14(x) x = self.block15(x)", "= inp x = x + skip return x class", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block7 = Block(728, 728, reps=3,", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block18 = Block(728,", "rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride !=", "model_dict[k.replace('block11', 'block17')] = v model_dict[k.replace('block11', 'block18')] = v model_dict[k.replace('block11', 'block19')]", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block10 = Block(728, 728, reps=3,", "728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True, is_last=True) # Middle flow", "rep.append(BatchNorm(planes)) if stride != 1: rep.append(self.relu) 
rep.append(SeparableConv2d(planes, planes, 3, 2,", "stride=2, BatchNorm=BatchNorm, start_with_relu=False) self.block2 = Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm,", "elif k.startswith('block12'): model_dict[k.replace('block12', 'block20')] = v elif k.startswith('bn3'): model_dict[k] =", "pretrained: self._load_pretrained_model() def forward(self, x): # Entry flow x =", "inp x = x + skip return x class AlignedXception(nn.Module):", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block12", "v model_dict[k.replace('block11', 'block13')] = v model_dict[k.replace('block11', 'block14')] = v model_dict[k.replace('block11',", "Middle flow self.block4 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "x class AlignedXception(nn.Module): \"\"\" Modified Alighed Xception \"\"\" def __init__(self,", "def _load_pretrained_model(self): pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth') model_dict = {} state_dict =", "'block18')] = v model_dict[k.replace('block11', 'block19')] = v elif k.startswith('block12'): model_dict[k.replace('block12',", "torch import torch.nn as nn import torch.nn.functional as F import", "728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block10 = Block(728,", "import math import torch import torch.nn as nn import torch.nn.functional", "(dilation - 1) pad_total = kernel_size_effective - 1 pad_beg =", "padding=1, bias=False) self.bn2 = BatchNorm(64) self.block1 = Block(64, 128, reps=2,", "self.block3(x) # Middle flow x = self.block4(x) x = self.block5(x)", "rep.append(BatchNorm(planes)) if stride == 1 and is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes,", "x = self.rep(inp) if self.skip is not None: skip =", "grow_first=True) self.block7 = Block(728, 728, reps=3, stride=1, 
dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True,", "x = self.block14(x) x = self.block15(x) x = self.block16(x) x", "self.block3 = Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True, is_last=True)", "pad_end)) return padded_inputs class SeparableConv2d(nn.Module): def __init__(self, inplanes, planes, kernel_size=3,", "torch.utils.model_zoo as model_zoo from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d def fixed_padding(inputs, kernel_size,", "rep[1:] self.rep = nn.Sequential(*rep) def forward(self, inp): x = self.rep(inp)", "as F import torch.utils.model_zoo as model_zoo from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d", "flow self.conv1 = nn.Conv2d(3, 32, 3, stride=2, padding=1, bias=False) self.bn1", "output_stride == 16: entry_block3_stride = 2 middle_block_dilation = 1 exit_block_dilations", "= self.block12(x) x = self.block13(x) x = self.block14(x) x =", "= v.unsqueeze(-1).unsqueeze(-1) if k.startswith('block11'): model_dict[k] = v model_dict[k.replace('block11', 'block12')] =", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block17 =", "rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if not start_with_relu: rep", "import torch.nn as nn import torch.nn.functional as F import torch.utils.model_zoo", "self.rep = nn.Sequential(*rep) def forward(self, inp): x = self.rep(inp) if", "dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride != 1: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes,", "x = self.block8(x) x = self.block9(x) x = self.block10(x) x", "self.bn3(x) x = self.relu(x) x = self.conv4(x) x = self.bn4(x)", "'block15')] = v model_dict[k.replace('block11', 'block16')] = v model_dict[k.replace('block11', 'block17')] =", "self).__init__() if output_stride == 16: 
entry_block3_stride = 2 middle_block_dilation =", "if output_stride == 16: entry_block3_stride = 2 middle_block_dilation = 1", "= self.block8(x) x = self.block9(x) x = self.block10(x) x =", "start_with_relu=True, grow_first=True) self.block11 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm,", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block8", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block18 = Block(728, 728, reps=3, stride=1,", "self.pointwise(x) return x class Block(nn.Module): def __init__(self, inplanes, planes, reps,", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block11 =", "# add relu here x = self.relu(x) low_level_feat = x", "- 1) * (dilation - 1) pad_total = kernel_size_effective -", "start_with_relu=False, grow_first=True) self.block3 = Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm, start_with_relu=True,", "= 1 exit_block_dilations = (1, 2) elif output_stride == 8:", "and is_last: rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if", "Entry flow self.conv1 = nn.Conv2d(3, 32, 3, stride=2, padding=1, bias=False)", "self.bn5(x) x = self.relu(x) return x, low_level_feat def _init_weight(self): for", "start_with_relu=True, grow_first=True, is_last=False): super(Block, self).__init__() if planes != inplanes or", "stride=1, dilation=1, BatchNorm=None, start_with_relu=True, grow_first=True, is_last=False): super(Block, self).__init__() if planes", "!= 1: self.skip = nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False) self.skipbn", "def forward(self, inp): x = self.rep(inp) if self.skip is not", "stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn4 = BatchNorm(1536) 
self.conv5 = SeparableConv2d(1536, 2048,", "SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm) self.bn3 = BatchNorm(1536) self.conv4", "model_zoo from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d def fixed_padding(inputs, kernel_size, dilation): kernel_size_effective", "def forward(self, x): # Entry flow x = self.conv1(x) x", "(pad_beg, pad_end, pad_beg, pad_end)) return padded_inputs class SeparableConv2d(nn.Module): def __init__(self,", "if grow_first: rep.append(self.relu) rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes))", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block17 = Block(728, 728, reps=3, stride=1,", "1, stride=stride, bias=False) self.skipbn = BatchNorm(planes) else: self.skip = None", "self.block18(x) x = self.block19(x) # Exit flow x = self.block20(x)", "dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block6 = Block(728, 728, reps=3, stride=1,", "bias=False) self.bn2 = BatchNorm(64) self.block1 = Block(64, 128, reps=2, stride=2,", "Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block13 =", "= kernel_size_effective - 1 pad_beg = pad_total // 2 pad_end", "= Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block7", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block10 = Block(728, 728,", "== 16: entry_block3_stride = 2 middle_block_dilation = 1 exit_block_dilations =", "reps=3, stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block15 = Block(728, 728,", "class SeparableConv2d(nn.Module): def __init__(self, inplanes, planes, kernel_size=3, 
stride=1, dilation=1, bias=False,", "2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride == 1 and is_last: rep.append(self.relu)", "stride=1, dilation=middle_block_dilation, BatchNorm=BatchNorm, start_with_relu=True, grow_first=True) self.block11 = Block(728, 728, reps=3,", "rep.append(self.relu) rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm)) rep.append(BatchNorm(planes)) if stride ==", "def fixed_padding(inputs, kernel_size, dilation): kernel_size_effective = kernel_size + (kernel_size -" ]
[ "Mail( from_email=from_mail, to_emails=to_mails ) message.dynamic_template_data = { 'name': username, 'title1'", "event1, event2): for user in users: send_mail(DEFAULT_MAIL, user.username, user.email, event1,", "in users: send_mail(DEFAULT_MAIL, user.username, user.email, event1, event2) def send_mail(from_mail, username,", ": link(event1.urlsafe), 'loc1': event1.location, 'date1': event1.date.strftime('%d-%m-%Y %H:%M'), 'title2' : event2.title,", "+ urlsafe def send_newsletter(users, event1, event2): for user in users:", "'<KEY>' API_KEY_ID = '<KEY>' ENCODING = \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe):", "event2) def send_mail(from_mail, username, to_mails, event1, event2): message = Mail(", "client = SendGridAPIClient(API_KEY) response = client.send(message) code = response.status_code print('after')", "urlsafe def send_newsletter(users, event1, event2): for user in users: send_mail(DEFAULT_MAIL,", "users: send_mail(DEFAULT_MAIL, user.username, user.email, event1, event2) def send_mail(from_mail, username, to_mails,", "Substitution API_KEY = '<KEY>' API_KEY_ID = '<KEY>' ENCODING = \"utf-8\"", "lambda ret_code: ret_code // 100 in (2, 3) if not", "= response.status_code print('after') was_successful = lambda ret_code: ret_code // 100", "'src2' : link(event2.urlsafe), 'loc2': event2.location, 'date2': event2.date.strftime('%d-%m-%Y %H:%M') } print('before')", "link(event1.urlsafe), 'loc1': event1.location, 'date1': event1.date.strftime('%d-%m-%Y %H:%M'), 'title2' : event2.title, 'src2'", "event1.location, 'date1': event1.date.strftime('%d-%m-%Y %H:%M'), 'title2' : event2.title, 'src2' : link(event2.urlsafe),", "user.email, event1, event2) def send_mail(from_mail, username, to_mails, event1, event2): message", "username, to_mails, event1, event2): message = Mail( from_email=from_mail, to_emails=to_mails )", "link(event2.urlsafe), 'loc2': event2.location, 'date2': event2.date.strftime('%d-%m-%Y %H:%M') } print('before') message.template_id =", "'loc1': 
event1.location, 'date1': event1.date.strftime('%d-%m-%Y %H:%M'), 'title2' : event2.title, 'src2' :", "to_mails, event1, event2): message = Mail( from_email=from_mail, to_emails=to_mails ) message.dynamic_template_data", "def link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe def send_newsletter(users, event1, event2):", ": link(event2.urlsafe), 'loc2': event2.location, 'date2': event2.date.strftime('%d-%m-%Y %H:%M') } print('before') message.template_id", "print('before') message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6' client = SendGridAPIClient(API_KEY) response = client.send(message)", "event1, event2): message = Mail( from_email=from_mail, to_emails=to_mails ) message.dynamic_template_data =", "username, 'title1' : event1.title, 'src1' : link(event1.urlsafe), 'loc1': event1.location, 'date1':", "event1.title, 'src1' : link(event1.urlsafe), 'loc1': event1.location, 'date1': event1.date.strftime('%d-%m-%Y %H:%M'), 'title2'", "'src1' : link(event1.urlsafe), 'loc1': event1.location, 'date1': event1.date.strftime('%d-%m-%Y %H:%M'), 'title2' :", "send_mail(DEFAULT_MAIL, user.username, user.email, event1, event2) def send_mail(from_mail, username, to_mails, event1,", "API_KEY = '<KEY>' API_KEY_ID = '<KEY>' ENCODING = \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\"", "event1, event2) def send_mail(from_mail, username, to_mails, event1, event2): message =", "import SendGridAPIClient from sendgrid.helpers.mail import Mail, Substitution API_KEY = '<KEY>'", "response = client.send(message) code = response.status_code print('after') was_successful = lambda", "to_emails=to_mails ) message.dynamic_template_data = { 'name': username, 'title1' : event1.title,", "in (2, 3) if not was_successful(code): raise Exception(\"Couldn't send e-mail:", "= SendGridAPIClient(API_KEY) response = client.send(message) code = response.status_code print('after') was_successful", ") message.dynamic_template_data = { 'name': username, 'title1' : 
event1.title, 'src1'", "\"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe def send_newsletter(users, event1, event2): for user in", "= lambda ret_code: ret_code // 100 in (2, 3) if", "<gh_stars>1-10 from sendgrid import SendGridAPIClient from sendgrid.helpers.mail import Mail, Substitution", "SendGridAPIClient(API_KEY) response = client.send(message) code = response.status_code print('after') was_successful =", "= client.send(message) code = response.status_code print('after') was_successful = lambda ret_code:", "= Mail( from_email=from_mail, to_emails=to_mails ) message.dynamic_template_data = { 'name': username,", ": event1.title, 'src1' : link(event1.urlsafe), 'loc1': event1.location, 'date1': event1.date.strftime('%d-%m-%Y %H:%M'),", "= \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe def", "ret_code: ret_code // 100 in (2, 3) if not was_successful(code):", "if not was_successful(code): raise Exception(\"Couldn't send e-mail: {} {}\".format(code, response.body))", "response.status_code print('after') was_successful = lambda ret_code: ret_code // 100 in", "SendGridAPIClient from sendgrid.helpers.mail import Mail, Substitution API_KEY = '<KEY>' API_KEY_ID", "from sendgrid.helpers.mail import Mail, Substitution API_KEY = '<KEY>' API_KEY_ID =", "send_mail(from_mail, username, to_mails, event1, event2): message = Mail( from_email=from_mail, to_emails=to_mails", "return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe def send_newsletter(users, event1, event2): for user", "ret_code // 100 in (2, 3) if not was_successful(code): raise", "{ 'name': username, 'title1' : event1.title, 'src1' : link(event1.urlsafe), 'loc1':", "100 in (2, 3) if not was_successful(code): raise Exception(\"Couldn't send", "for user in users: send_mail(DEFAULT_MAIL, user.username, user.email, event1, event2) def", "event2): for user in 
users: send_mail(DEFAULT_MAIL, user.username, user.email, event1, event2)", "'name': username, 'title1' : event1.title, 'src1' : link(event1.urlsafe), 'loc1': event1.location,", "link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe def send_newsletter(users, event1, event2): for", "'<KEY>' ENCODING = \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" +", "'date1': event1.date.strftime('%d-%m-%Y %H:%M'), 'title2' : event2.title, 'src2' : link(event2.urlsafe), 'loc2':", "'loc2': event2.location, 'date2': event2.date.strftime('%d-%m-%Y %H:%M') } print('before') message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6'", "API_KEY_ID = '<KEY>' ENCODING = \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe): return", "'title2' : event2.title, 'src2' : link(event2.urlsafe), 'loc2': event2.location, 'date2': event2.date.strftime('%d-%m-%Y", "print('after') was_successful = lambda ret_code: ret_code // 100 in (2,", "from_email=from_mail, to_emails=to_mails ) message.dynamic_template_data = { 'name': username, 'title1' :", "def send_mail(from_mail, username, to_mails, event1, event2): message = Mail( from_email=from_mail,", "Mail, Substitution API_KEY = '<KEY>' API_KEY_ID = '<KEY>' ENCODING =", "message.dynamic_template_data = { 'name': username, 'title1' : event1.title, 'src1' :", "event2): message = Mail( from_email=from_mail, to_emails=to_mails ) message.dynamic_template_data = {", "3) if not was_successful(code): raise Exception(\"Couldn't send e-mail: {} {}\".format(code,", "event2.date.strftime('%d-%m-%Y %H:%M') } print('before') message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6' client = SendGridAPIClient(API_KEY)", "user in users: send_mail(DEFAULT_MAIL, user.username, user.email, event1, event2) def send_mail(from_mail,", "'d-6607926b2aba4f8fba984dccdaa9ece6' client = SendGridAPIClient(API_KEY) response = client.send(message) 
code = response.status_code", "'date2': event2.date.strftime('%d-%m-%Y %H:%M') } print('before') message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6' client =", "// 100 in (2, 3) if not was_successful(code): raise Exception(\"Couldn't", "client.send(message) code = response.status_code print('after') was_successful = lambda ret_code: ret_code", "send_newsletter(users, event1, event2): for user in users: send_mail(DEFAULT_MAIL, user.username, user.email,", "ENCODING = \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe", "%H:%M') } print('before') message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6' client = SendGridAPIClient(API_KEY) response", "from sendgrid import SendGridAPIClient from sendgrid.helpers.mail import Mail, Substitution API_KEY", "= { 'name': username, 'title1' : event1.title, 'src1' : link(event1.urlsafe),", "DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe def send_newsletter(users, event1,", "sendgrid.helpers.mail import Mail, Substitution API_KEY = '<KEY>' API_KEY_ID = '<KEY>'", "message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6' client = SendGridAPIClient(API_KEY) response = client.send(message) code", "event1.date.strftime('%d-%m-%Y %H:%M'), 'title2' : event2.title, 'src2' : link(event2.urlsafe), 'loc2': event2.location,", "= '<KEY>' ENCODING = \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\"", "\"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def link(urlsafe): return \"https://develop-dot-autonomus.appspot.com/events/details?event_id=\" + urlsafe def send_newsletter(users,", ": event2.title, 'src2' : link(event2.urlsafe), 'loc2': event2.location, 'date2': event2.date.strftime('%d-%m-%Y %H:%M')", "%H:%M'), 'title2' : event2.title, 'src2' : link(event2.urlsafe), 'loc2': event2.location, 
'date2':", "sendgrid import SendGridAPIClient from sendgrid.helpers.mail import Mail, Substitution API_KEY =", "user.username, user.email, event1, event2) def send_mail(from_mail, username, to_mails, event1, event2):", "'title1' : event1.title, 'src1' : link(event1.urlsafe), 'loc1': event1.location, 'date1': event1.date.strftime('%d-%m-%Y", "code = response.status_code print('after') was_successful = lambda ret_code: ret_code //", "message = Mail( from_email=from_mail, to_emails=to_mails ) message.dynamic_template_data = { 'name':", "} print('before') message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6' client = SendGridAPIClient(API_KEY) response =", "= 'd-6607926b2aba4f8fba984dccdaa9ece6' client = SendGridAPIClient(API_KEY) response = client.send(message) code =", "= '<KEY>' API_KEY_ID = '<KEY>' ENCODING = \"utf-8\" DEFAULT_MAIL=\"<EMAIL>\" def", "event2.title, 'src2' : link(event2.urlsafe), 'loc2': event2.location, 'date2': event2.date.strftime('%d-%m-%Y %H:%M') }", "def send_newsletter(users, event1, event2): for user in users: send_mail(DEFAULT_MAIL, user.username,", "import Mail, Substitution API_KEY = '<KEY>' API_KEY_ID = '<KEY>' ENCODING", "was_successful = lambda ret_code: ret_code // 100 in (2, 3)", "event2.location, 'date2': event2.date.strftime('%d-%m-%Y %H:%M') } print('before') message.template_id = 'd-6607926b2aba4f8fba984dccdaa9ece6' client", "(2, 3) if not was_successful(code): raise Exception(\"Couldn't send e-mail: {}" ]
[ "heatmap if necessary and modifying the aspect ratio. Does not", "X. If a user passes feature names in, those features", "X, y=None, **kwargs): \"\"\" Calls the internal `transform` method of", "features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True,", "If None is passed in, the current axes will be", "compatibility if make_axes_locatable is None: raise YellowbrickValueError( ( \"heatmap requires", "figure on. If None is passed in, the current axes", ") @property def uax(self): \"\"\" The axes of the colorbar,", "YellowbrickValueError(\"Projection dimensions must be either 2 or 3\") return self.ax", "Variable scaling can be controlled using the ``scale`` argument. Parameters", "feature value to the component. Parameters ---------- Xp : array-like", "used as an index to access or modify data in", "about the magnitude of each feature in the pricipal components.", "be passed among all layout calls. \"\"\" # Ensure matplotlib", "This is the colorbar for heatmap and not for the", "super class ensures that a colorbar is drawn when target", "components. This is primarily used to draw the biplots. classes_", "then the colors are treated as a cycle. colormap :", "Axes object Returns the axes that the scatter plot was", "to show the magnitude of each feature value to the", "# Data Parameters self.scale = scale self.proj_features = proj_features #", "the principal components. Also draws a colorbar for readability purpose.", "visualizer visualizer = PCA( ax=ax, features=features, scale=scale, projection=projection, proj_features=proj_features, colors=colors,", "scatterplot of points that represented the decomposition, `pca_features_`, of the", "to create the individual colors. 
In the discrete case it", "bool, default: True Boolean that indicates if user wants to", "plt.colorbar( im, cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0, self.pca_components_.max()], ) return self.ax", "max_z = max(Xp[:, 1]) for i in range(self.pca_components_.shape[1]): self.ax.plot( [0,", ") # Data Parameters self.scale = scale self.proj_features = proj_features", "show=True, **kwargs ): \"\"\" Produce a two or three dimensional", "values in the target. Only available if the target type", "Boolean that indicates if the user wants to project the", "1.05, self.features_[i], color=\"r\", ) else: raise YellowbrickValueError(\"Projection dimensions must be", "colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, **kwargs ): super(PCA, self).__init__(", "alpha # Visual Parameters self.heatmap = heatmap self._uax, self._lax =", "2) or (n, 3) The matrix produced by the ``transform()``", "that define the discrete values in the target. Only available", "method of the Yellowbrick visualizer, finally returning a new array", "class values. ax : matplotlib Axes, default: None The axes", "Call super fit to compute features, classes, colors, etc. super(PCA,", "The axes of the heatmap below scatter plot. \"\"\" if", "method. y : array-like of shape (n,), optional The target,", "plot. 
This is the colorbar for heatmap and not for", "to the visualizer, otherwise one will be created using the", "passed in, the current axes will be used (or generated", "is larger than 500x500 and the number of components to", "**kwargs ): super(PCA, self).__init__( ax=ax, features=features, classes=classes, colors=colors, colormap=colormap, projection=projection,", "**kwargs): \"\"\" Draws the title, labels, legends, heatmap, and colorbar", "3d plots as they do not have permit axes #", ") self.alpha = alpha # Visual Parameters self.heatmap = heatmap", "iris.target >>> visualizer = PCA() >>> visualizer.fit_transform(X, y) >>> visualizer.show()", "``X``. Next calls the ``draw`` method of the Yellowbrick visualizer,", "into either 2 or 3 dimensions. If 2 dimensions are", ": int or string, default: 2 The number of axes", "visualization with PCA. \"\"\" ########################################################################## ## Imports ########################################################################## # NOTE:", "classes, colors, etc. super(PCA, self).fit(X=X, y=y, **kwargs) self.pca_transformer.fit(X) self.pca_components_ =", "of n instances with m features. y : ndarray or", "len(self.features_))) self.lax.set_xticklabels([]) # Makes the labels centered. self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True)", "proj_features=proj_features, colors=colors, colormap=colormap, alpha=alpha, random_state=random_state, colorbar=colorbar, heatmap=heatmap, **kwargs ) #", "as they do not have permit axes # division. if", "is ``False`` or ``None``. Parameters ---------- divider: AxesDivider An AxesDivider", "self.ax.set_zlabel(\"$PC_3$\") if self.heatmap == True: self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([]) # Makes", "target. Only available if the target type is discrete. This", "on. \"\"\" x_vector = self.pca_components_[0] y_vector = self.pca_components_[1] max_x =", "labels centered. 
self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True) self.lax.set_xticklabels( self.features_, rotation=90, fontsize=12, minor=True", "fit(self, X, y=None, **kwargs): \"\"\" Fits the PCA transformer, transforms", "scale=True, projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, show=True,", "# -*- coding: utf-8 -*- # yellowbrick.features.pca # Decomposition based", "------- self : visualizer Returns self for use in Pipelines.", "primarily used to draw the biplots. classes_ : ndarray, shape", "common practice to scale the data array ``X`` before applying", "if self.projection == 3 and self.heatmap: raise YellowbrickValueError( \"heatmap and", "colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, show=True, **kwargs ): \"\"\" Produce", "of the smallest dimension of the data, then the more", "in y. Note that the length of this list must", "projected space. If True the plot will be similar to", "== 3: z_vector = self.pca_components_[2] max_z = max(Xp[:, 1]) for", "visualizer.finalize() # Returns the visualizer object. return visualizer # Alias", "used in the visualizer that can be used as an", "enough colors per class are specified then the colors are", "self.proj_features = proj_features # Create the PCA transformer self.pca_transformer =", "the minimum and maximum values in the target. Only available", "range(self.pca_components_.shape[1]): self.ax.plot( [0, x_vector[i] * max_x], [0, y_vector[i] * max_y],", "transformed space. Parameters ---------- Xp : array-like of shape (n,", "= iris.data >>> y = iris.target >>> pca_decomposition(X, y, colors=['r',", "is drawn when target is # continuous. super(PCA, self).layout(divider) if", "argument. Parameters ---------- ax : matplotlib Axes, default: None The", "of scatter plot. This is the colorbar for heatmap and", "colors per class are specified then the colors are treated", "projection features in transformed space. 
self._draw_projection_features(Xp, y) if self.projection ==", "2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\", fontsize=10) self.fig.tight_layout() ########################################################################## ## Quick Method", ": visualizer Returns self for use in Pipelines. \"\"\" #", "None The names of the features specified by the columns", "= max(Xp[:, 1]) if self.projection == 2: for i in", "features=features, scale=scale, projection=projection, proj_features=proj_features, colors=colors, colormap=colormap, alpha=alpha, random_state=random_state, colorbar=colorbar, heatmap=heatmap,", "#615 spec im = self.lax.imshow( self.pca_components_, interpolation=\"none\", cmap=self.colormap, aspect=\"auto\", )", "dimension of the data, then the more efficient `randomized` solver", "list, default: None The names of the features specified by", "is None: raise AttributeError(\"This visualizer does not have an axes", "3\") return self.ax def finalize(self, **kwargs): \"\"\" Draws the title,", "turn calls ``plt.show()`` however you cannot call ``plt.savefig`` from this", "\"\"\" Plots a scatterplot of points that represented the decomposition,", "* 1.05, self.features_[i], color=\"r\", ) elif self.projection == 3: z_vector", "colorbar(for heatmap). 
if self._uax is None: self._uax = divider.append_axes(\"bottom\", size=\"10%\",", "heatmap=False, **kwargs ): super(PCA, self).__init__( ax=ax, features=features, classes=classes, colors=colors, colormap=colormap,", "or three dimensional principal component plot of the data array", "not compatible with 3d projections\" ) @property def uax(self): \"\"\"", "visualizer = PCA( ax=ax, features=features, scale=scale, projection=projection, proj_features=proj_features, colors=colors, colormap=colormap,", "alpha=0.75, random_state=None, colorbar=True, heatmap=False, show=True, **kwargs ): \"\"\" Produce a", "True: self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([]) # Makes the labels centered. self.lax.set_xticks(np.arange(0,", "to color each instance according to its class in the", "if the classes are a different type. features_ : ndarray,", "\"\"\" The axes of the colorbar, bottom of scatter plot.", "Author: <NAME> # Author: <NAME> # Created: Tue May 23", ">>> y = iris.target >>> visualizer = PCA() >>> visualizer.fit_transform(X,", "necessary and modifying the aspect ratio. Does not modify the", "indicates if the user wants to project the features in", "optional The target, used to specify the colors of the", "plot will be similar to a biplot. colors : list", "Copyright (C) 2017 The scikit-yb developers # For license information,", "bool, default: True If True, calls ``show()``, which in turn", "as an index to access or modify data in X.", "\"\"\" Fits the PCA transformer, transforms the data in X,", "``X`` projected onto its largest sequential principal components. It is", "keyword arguments. \"\"\" super(PCA, self).finalize() self.ax.set_title(\"Principal Component Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\")", "the axes that the scatter plot was drawn on. 
\"\"\"", "class labels for each class in y, ordered by sorted", ") elif self.projection == 3: z_vector = self.pca_components_[2] max_z =", "axes for the heatmap if necessary and modifying the aspect", "of shape n x m Returns a new array-like object", "compute features, classes, colors, etc. super(PCA, self).fit(X=X, y=y, **kwargs) self.pca_transformer.fit(X)", "self.fig.tight_layout() ########################################################################## ## Quick Method ########################################################################## def pca_decomposition( X, y=None,", "controlled using the ``scale`` argument. Parameters ---------- ax : matplotlib", "finally returning a new array of transformed features of shape", "**kwargs ) # Data Parameters self.scale = scale self.proj_features =", "np import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import make_axes_locatable from", "an exception is raised. This parameter is only used in", "`X`, projected into either 2 or 3 dimensions. If 2", "encoder for the legend, identifying integer classes or renaming string", "or class values. Returns ------- self : visualizer Returns self", "except NotFittedError: raise NotFitted.from_estimator(self, \"transform\") def draw(self, Xp, y): \"\"\"", "with PCA. # # Author: <NAME> # Author: <NAME> #", "readability purpose. The heatmap is accessible using lax property and", ": dict Keyword arguments that are passed to the base", "values. Returns ------- Xp : ndarray or DataFrame of shape", ") self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\", fontsize=10) self.fig.tight_layout() ########################################################################## ##", "y) visualizer.transform(X, y) if show: visualizer.show() else: visualizer.finalize() # Returns", "wants to project the features in the projected space. 
If", "or DataFrame of shape n x m A matrix of", "Next calls the ``draw`` method of the Yellowbrick visualizer, finally", "data in X, then draws the decomposition in either 2D", "cannot call ``plt.savefig`` from this signature, nor ``clear_figure``. If False,", "n x m A matrix of n instances with m", "super(PCA, self).finalize() self.ax.set_title(\"Principal Component Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if self.projection ==", "classes or renaming string labels. If omitted, the class labels", "<EMAIL> $ \"\"\" Decomposition based feature visualization with PCA. \"\"\"", "if the user wants to project the features in the", "heatmap showing contribution of each feature in the principal components.", "X = iris.data >>> y = iris.target >>> pca_decomposition(X, y,", "used or just simply the indices of the data array.", "datasets.load_iris() >>> X = iris.data >>> y = iris.target >>>", "space. Parameters ---------- Xp : array-like of shape (n, 2)", "wants to scale data. projection : int or string, default:", "None: raise YellowbrickValueError( ( \"heatmap requires matplotlib 2.0.2 or greater", "heatmap below scatter plot. \"\"\" if self._lax is None: raise", "The class labels that define the discrete values in the", "bool, default: False Add a heatmap showing contribution of each", "if necessary and modifying the aspect ratio. Does not modify", "the labels centered. self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True) self.lax.set_xticklabels( self.features_, rotation=90, fontsize=12,", "'b'], projection=3) \"\"\" # Instantiate the visualizer visualizer = PCA(", "the ``draw`` method of the Yellowbrick visualizer, finally returning a", "max_y * 1.05, z_vector[i] * max_z * 1.05, self.features_[i], color=\"r\",", "super fit to compute features, classes, colors, etc. 
super(PCA, self).fit(X=X,", "yellowbrick.style import palettes from yellowbrick.features.projection import ProjectionVisualizer from yellowbrick.exceptions import", "[ (\"scale\", StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection, random_state=random_state)), ] ) self.alpha =", "array ``X`` before applying a PC decomposition. Variable scaling can", "visualizer = PCA() >>> visualizer.fit_transform(X, y) >>> visualizer.show() \"\"\" def", "y, colors=['r', 'g', 'b'], projection=3) \"\"\" # Instantiate the visualizer", ">>> visualizer.show() \"\"\" def __init__( self, ax=None, features=None, classes=None, scale=True,", "not modify the axes or the layout if ``self.heatmap`` is", "new array of transformed features of shape ``(len(X), projection)``. Parameters", "``draw`` method of the Yellowbrick visualizer, finally returning a new", "in range(self.pca_components_.shape[1]): self.ax.plot( [0, x_vector[i] * max_x], [0, y_vector[i] *", "principal component plot of the data array ``X`` projected onto", "visualizer does not have an axes for colorbar\") return self._uax", "AxesDivider to be passed among all layout calls. \"\"\" #", "the Yellowbrick visualizer, finally returning a new array of transformed", "data array ``X`` projected onto its largest sequential principal components.", "in Pipelines. \"\"\" # Call super fit to compute features,", "n An array or series of target or class values.", "plot. \"\"\" if self._lax is None: raise AttributeError(\"This visualizer does", "available if the target type is discrete. 
This is guaranteed", ">>> X = iris.data >>> y = iris.target >>> pca_decomposition(X,", "random_state=random_state)), ] ) self.alpha = alpha # Visual Parameters self.heatmap", "= heatmap self._uax, self._lax = None, None # No heatmap", "or None, optional (default None) This parameter sets the random", "match the number of unique values in y, otherwise an", "this list must match the number of columns in X,", "default: None A single color to plot all instances as", "Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if self.projection == 3: self.ax.set_zlabel(\"$PC_3$\") if self.heatmap", "default: True If the target_type is \"continous\" draw a colorbar", "and not for the scatter plot. \"\"\" if self._uax is", "returning a new array of transformed features of shape ``(len(X),", "2 The number of axes to project into, either 2d", "this signature, nor ``clear_figure``. If False, simply calls ``finalize()`` kwargs", "shape n x m A matrix of n instances with", "3D projection import numpy as np import matplotlib.pyplot as plt", "Returns the visualizer object. return visualizer # Alias for PCA", "the ``transform()`` method. y : array-like of shape (n,), optional", "y = iris.target >>> visualizer = PCA() >>> visualizer.fit_transform(X, y)", "of the original features, `X`, projected into either 2 or", "colorbar for readability purpose. The heatmap is accessible using lax", "heatmap if divider is None: divider = make_axes_locatable(self.ax) # Call", "an axes for colorbar\") return self._uax @property def lax(self): \"\"\"", "projection import numpy as np import matplotlib.pyplot as plt from", "the colors are treated as a cycle. colormap : string", "# # Copyright (C) 2017 The scikit-yb developers # For", "self.projection == 3 and self.heatmap: raise YellowbrickValueError( \"heatmap and colorbar", "transformed space. self._draw_projection_features(Xp, y) if self.projection == 2: if self.heatmap:", "axes that the scatter plot was drawn on. 
\"\"\" x_vector", "If the input X is larger than 500x500 and the", "created using the current figure. proj_features : bool, default: False", "internal `transform` method of the scikit-learn PCA transformer, which performs", "PCA() >>> visualizer.fit_transform(X, y) >>> visualizer.show() \"\"\" def __init__( self,", "``X`` before applying a PC decomposition. Variable scaling can be", "= alpha # Visual Parameters self.heatmap = heatmap self._uax, self._lax", "feature in the principal components. Also draws a colorbar for", "Keyword arguments that are passed to the base class and", "define the discrete values in the target. Only available if", "to super draw which draws the scatter plot. super(PCA, self).draw(Xp,", "use in Pipelines. \"\"\" # Call super fit to compute", "for use in Pipelines. \"\"\" # Call super fit to", "in the visualizer that can be used as an index", "is # continuous. super(PCA, self).layout(divider) if self.heatmap: # Axes for", "self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\", fontsize=10) self.fig.tight_layout() ########################################################################## ## Quick", "either 2 or 3\") return self.ax def finalize(self, **kwargs): \"\"\"", ") return self.ax def _draw_projection_features(self, Xp, y): \"\"\" Draw the", "PCATransformer from sklearn.preprocessing import StandardScaler from sklearn.exceptions import NotFittedError ##########################################################################", "self, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75,", "self._uax, self._lax = None, None # No heatmap can be", "(calls draw) visualizer.fit(X, y) visualizer.transform(X, y) if show: visualizer.show() else:", ": list, default: None The names of the features specified", "self.features_[i], color=\"r\", ) else: raise YellowbrickValueError(\"Projection dimensions must be either", "2 
dimensions are selected, a colorbar and heatmap can also", "color=\"r\", ) elif self.projection == 3: z_vector = self.pca_components_[2] max_z", "* max_x, dy=y_vector[i] * max_y, color=\"r\", head_width=0.05, width=0.005, ) self.ax.text(", "or used in the visualizer that can be used as", "decomposition, `pca_features_`, of the original features, `X`, projected into either", "23 18:34:27 2017 -0400 # # Copyright (C) 2017 The", "**kwargs ) # Fit and transform the visualizer (calls draw)", "array projected onto its largest sequential principal components. It is", "data, then the more efficient `randomized` solver is enabled. colorbar", "the visualizer visualizer = PCA( ax=ax, features=features, scale=scale, projection=projection, proj_features=proj_features,", "in, those features are used. Otherwise the columns of a", "``(len(X), projection)``. Parameters ---------- X : ndarray or DataFrame of", "if self.heatmap: # Axes for colorbar(for heatmap). if self._uax is", "class in the discrete case or as an ordered colormap", "discrete case it is used to compute the number of", "to the right of the scatter plot. The colobar axes", "Does not modify the axes or the layout if ``self.heatmap``", "matplotlib 2.0.2 or greater \" \"please upgrade matplotlib or set", "be created using the current figure. proj_features : bool, default:", "None The axes to plot the figure on. 
If None", "show the magnitude of each feature value to the component.", "according to its class in the discrete case or as", "create a sequential color map based on the range of", ": ndarray, shape (n_features,) The names of the features discovered", "Call to super class ensures that a colorbar is drawn", "user wants to project the features in the projected space.", "the base class and may influence the visualization as defined", "= iris.data >>> y = iris.target >>> visualizer = PCA()", "it is used to compute the number of colors needed", "------- Xp : ndarray or DataFrame of shape n x", "Produce a two or three dimensional principal component plot of", "-*- # yellowbrick.features.pca # Decomposition based feature visualization with PCA.", "on. \"\"\" # Call to super draw which draws the", "X = iris.data >>> y = iris.target >>> visualizer =", "self._lax = None, None # No heatmap can be drawn", "\"please upgrade matplotlib or set heatmap=False on the visualizer\" )", "calls ``show()``, which in turn calls ``plt.show()`` however you cannot", "compatible with 3d projections\" ) @property def uax(self): \"\"\" The", "or a list of colors to color each instance according", "3d projections\" ) @property def uax(self): \"\"\" The axes of", "for heatmap\") return self._lax def layout(self, divider=None): \"\"\" Creates the", "for heatmap if self._lax is None: self._lax = divider.append_axes(\"bottom\", size=\"15%\",", "minor=True ) self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\", fontsize=10) self.fig.tight_layout() ##########################################################################", "alpha=alpha, random_state=random_state, colorbar=colorbar, heatmap=heatmap, **kwargs ) # Fit and transform", "instance or None, optional (default None) This parameter sets the", "uax property. show : bool, default: True If True, calls", "colors of the points. 
Returns ------- self.ax : matplotlib Axes", "PCA( ax=ax, features=features, scale=scale, projection=projection, proj_features=proj_features, colors=colors, colormap=colormap, alpha=alpha, random_state=random_state,", "heatmap self._uax, self._lax = None, None # No heatmap can", "matplotlib, please ensure a 3d axes is passed to the", "self.pca_transformer = Pipeline( [ (\"scale\", StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection, random_state=random_state)), ]", "features in transformed space. self._draw_projection_features(Xp, y) if self.projection == 2:", "axes that the scatter plot was drawn on. \"\"\" #", "used to specify the colors of the points. Returns -------", "AttributeError(\"This visualizer does not have an axes for colorbar\") return", "features, classes, colors, etc. super(PCA, self).fit(X=X, y=y, **kwargs) self.pca_transformer.fit(X) self.pca_components_", "unique values in y, otherwise an exception is raised. This", "be used (or generated if required). features : list, default:", "Only available if the target type is continuous. Examples --------", "= palettes.DEFAULT_SEQUENCE # TODO: change to pcolormesh instead of imshow", "max_y * 1.05, self.features_[i], color=\"r\", ) elif self.projection == 3:", "\"heatmap requires matplotlib 2.0.2 or greater \" \"please upgrade matplotlib", "and self.heatmap: raise YellowbrickValueError( \"heatmap and colorbar are not compatible", "does not have an axes for colorbar\") return self._uax @property", "self.pca_components_[1] max_x = max(Xp[:, 0]) max_y = max(Xp[:, 1]) if", "not have permit axes # division. 
if self.projection == 3", "* max_x * 1.05, y_vector[i] * max_y * 1.05, self.features_[i],", "from yellowbrick.style import palettes from yellowbrick.features.projection import ProjectionVisualizer from yellowbrick.exceptions", "Xp, y): \"\"\" Draw the projection of features in the", "# Created: Tue May 23 18:34:27 2017 -0400 # #", "ndarray, shape (n_features, n_components) This tells about the magnitude of", "m A matrix of n instances with m features. y", "map based on the range of the target. alpha :", "``scale`` argument. Parameters ---------- X : ndarray or DataFrame of", "2 or 3\") return self.ax def finalize(self, **kwargs): \"\"\" Draws", "arguments. \"\"\" super(PCA, self).finalize() self.ax.set_title(\"Principal Component Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if", "visible. random_state : int, RandomState instance or None, optional (default", "shape ``(len(X), projection)``. Parameters ---------- X : ndarray or DataFrame", "Draws the title, labels, legends, heatmap, and colorbar as specified", "on the range of the target. alpha : float, default:", "the columns of a DataFrame are used or just simply", "information, see LICENSE.txt # # ID: pca.py [] <EMAIL> $", "m Returns a new array-like object of transformed features of", "shape ``(len(X), projection)``. \"\"\" try: Xp = self.pca_transformer.transform(X) self.draw(Xp, y)", "target is # continuous. super(PCA, self).layout(divider) if self.heatmap: # Axes", "`transform` method of the scikit-learn PCA transformer, which performs a", "continuous. Examples -------- >>> from sklearn import datasets >>> iris", "def uax(self): \"\"\" The axes of the colorbar, bottom of", "utf-8 -*- # yellowbrick.features.pca # Decomposition based feature visualization with", "projection of features in the transformed space. 
Parameters ---------- Xp", "= PCA( ax=ax, features=features, scale=scale, projection=projection, proj_features=proj_features, colors=colors, colormap=colormap, alpha=alpha,", "max_y = max(Xp[:, 1]) if self.projection == 2: for i", "features of shape ``(len(X), projection)``. Parameters ---------- X : ndarray", "proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, show=True, **kwargs ):", "version compatibility if make_axes_locatable is None: raise YellowbrickValueError( ( \"heatmap", "strings even if the classes are a different type. features_", "available if the target type is continuous. Examples -------- >>>", "heatmap is accessible using lax property and colorbar using uax", "YellowbrickValueError( ( \"heatmap requires matplotlib 2.0.2 or greater \" \"please", "new axes for the colorbar and heatmap if divider is", "is \"continous\" draw a colorbar to the right of the", "dimensions. If 2 dimensions are selected, a colorbar and heatmap", "if self.heatmap == True: self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([]) # Makes the", "lax property and colorbar using uax property. kwargs : dict", "set heatmap=False on the visualizer\" ) ) # Create the", "int or string, default: 2 The number of axes to", "classes are a different type. features_ : ndarray, shape (n_features,)", "PCA. \"\"\" ########################################################################## ## Imports ########################################################################## # NOTE: must import", "a colorbar and heatmap can also be optionally included to", "y=None, **kwargs): \"\"\" Fits the PCA transformer, transforms the data", "= scale self.proj_features = proj_features # Create the PCA transformer", "matplotlib Axes, default: None The axes to plot the figure", "ignored otherwise. scale : bool, default: True Boolean that indicates", "of colors needed for each class and in the continuous", "the target. 
Only available if the target type is continuous.", "self._draw_projection_features(Xp, y) if self.projection == 2: if self.heatmap: if not", "alpha=0.75, random_state=None, colorbar=True, heatmap=False, **kwargs ): super(PCA, self).__init__( ax=ax, features=features,", "with 3d plots as they do not have permit axes", "########################################################################## # NOTE: must import mplot3d to load the 3D", "the data in X, then draws the decomposition in either", "y_vector[i] * max_y * 1.05, z_vector[i] * max_z * 1.05,", "uax property. kwargs : dict Keyword arguments that are passed", "Parameters ---------- Xp : array-like of shape (n, 2) or", "= iris.target >>> visualizer = PCA() >>> visualizer.fit_transform(X, y) >>>", "self.heatmap == True: self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([]) # Makes the labels", "* 1.05, self.features_[i], color=\"r\", ) else: raise YellowbrickValueError(\"Projection dimensions must", "the individual colors. In the discrete case it is used", "discrete values in the target. Only available if the target", "Yellowbrick visualizer, finally returning a new array of transformed features", "3: self.ax.set_zlabel(\"$PC_3$\") if self.heatmap == True: self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([]) #", "the sequential case. If not enough colors per class are", "# Author: <NAME> # Author: <NAME> # Created: Tue May", "y=0, dx=x_vector[i] * max_x, dy=y_vector[i] * max_y, color=\"r\", head_width=0.05, width=0.005,", "as or a list of colors to color each instance", "If omitted, the class labels will be taken from the", "the magnitude of each feature in the pricipal components. This", "that represented the decomposition, `pca_features_`, of the original features, `X`,", "project into, either 2d or 3d. To plot 3d plots", "\"\"\" # Instantiate the visualizer visualizer = PCA( ax=ax, features=features,", "to project into, either 2d or 3d. 
To plot 3d", "components. It is common practice to scale the data array", "values. ax : matplotlib Axes, default: None The axes to", "state on this solver. If the input X is larger", "self.heatmap = heatmap self._uax, self._lax = None, None # No", "= self.pca_transformer.transform(X) self.draw(Xp, y) return Xp except NotFittedError: raise NotFitted.from_estimator(self,", "below scatter plot. \"\"\" if self._lax is None: raise AttributeError(\"This", "2D and 3D PCA Visualizer ########################################################################## class PCA(ProjectionVisualizer): \"\"\" Produce", "otherwise. scale : bool, default: True Boolean that indicates if", "None) This parameter sets the random state on this solver.", "1]) if self.projection == 2: for i in range(self.pca_components_.shape[1]): self.ax.arrow(", "is accessible using the cax property. heatmap : bool, default:", "or Series of length n An array or series of", "describes the minimum and maximum values in the target. Only", "and maximum values in the target. Only available if the", "different type. features_ : ndarray, shape (n_features,) The names of", "greater \" \"please upgrade matplotlib or set heatmap=False on the", "shape n x m Returns a new array-like object of", "modifying the aspect ratio. 
Does not modify the axes or", "the current axes will be used (or generated if required).", "bool, default: True If the target_type is \"continous\" draw a", "and the number of components to extract is lower than", "_draw_projection_features(self, Xp, y): \"\"\" Draw the projection of features in", "n x m Returns a new array-like object of transformed", "a two or three dimensional principal component plot of a", "ID: pca.py [] <EMAIL> $ \"\"\" Decomposition based feature visualization", "# Author: <NAME> # Created: Tue May 23 18:34:27 2017", "heatmap=False, show=True, **kwargs ): \"\"\" Produce a two or three", "to be strings even if the classes are a different", "== 2: for i in range(self.pca_components_.shape[1]): self.ax.arrow( x=0, y=0, dx=x_vector[i]", "length of this list must match the number of unique", "super(PCA, self).__init__( ax=ax, features=features, classes=classes, colors=colors, colormap=colormap, projection=projection, alpha=alpha, colorbar=colorbar,", "the scatter plot. super(PCA, self).draw(Xp, y) if self.proj_features: # Draws", "are specified then the colors are treated as a cycle.", "the class labels will be taken from the unique values", "transformer, which performs a dimensionality reduction on the input features", "is passed to the visualizer, otherwise one will be created", "default: True If True, calls ``show()``, which in turn calls", "(n_features, n_components) This tells about the magnitude of each feature", "the projected space. If True the plot will be similar", "Axes, default: None The axes to plot the figure on.", "be similar to a biplot. colors : list or tuple,", "object. return visualizer # Alias for PCA PCADecomposition = PCA", "``finalize()`` kwargs : dict Keyword arguments that are passed to", "``scale`` argument. 
Parameters ---------- ax : matplotlib Axes, default: None", "try: Xp = self.pca_transformer.transform(X) self.draw(Xp, y) return Xp except NotFittedError:", "# # ID: pca.py [] <EMAIL> $ \"\"\" Decomposition based", "scaling can be controlled using the ``scale`` argument. Parameters ----------", "property. kwargs : dict Keyword arguments that are passed to", "number of components to extract is lower than 80% of", "names of the features discovered or used in the visualizer", "'g', 'b'], projection=3) \"\"\" # Instantiate the visualizer visualizer =", "Boolean that indicates if user wants to scale data. projection", "labels for each class in y, ordered by sorted class", "len(self.features_)), minor=True) self.lax.set_xticklabels( self.features_, rotation=90, fontsize=12, minor=True ) self.lax.set_yticks(np.arange(0.5, 2))", "or series of target or class values. Returns ------- Xp", "Makes the labels centered. self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True) self.lax.set_xticklabels( self.features_, rotation=90,", "if self.heatmap: if not self.colormap: self.colormap = palettes.DEFAULT_SEQUENCE # TODO:", "visualizer\" ) ) # Create the new axes for the", "passed among all layout calls. \"\"\" # Ensure matplotlib version", "by the ``transform()`` method. y : array-like of shape (n,),", "\"\"\" ########################################################################## ## Imports ########################################################################## # NOTE: must import mplot3d", "of transformed features of shape ``(len(X), projection)``. \"\"\" try: Xp", "each feature in the principal components. Also draws a colorbar", "Xp : ndarray or DataFrame of shape n x m", "that indicates if user wants to scale data. projection :", "make_axes_locatable(self.ax) # Call to super class ensures that a colorbar", "index to access or modify data in X. 
If a", "elif self.projection == 3: z_vector = self.pca_components_[2] max_z = max(Xp[:,", "be either 2 or 3\") return self.ax def finalize(self, **kwargs):", "the unique values in y. Note that the length of", "in the sequential case. If not enough colors per class", "input features ``X``. Next calls the ``draw`` method of the", "optionally included to show the magnitude of each feature value", "the features in the projected space. If True the plot", "Pipeline( [ (\"scale\", StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection, random_state=random_state)), ] ) self.alpha", "LICENSE.txt # # ID: pca.py [] <EMAIL> $ \"\"\" Decomposition", "Returns a new array-like object of transformed features of shape", "colorbar and heatmap can also be optionally included to show", "is None: self._lax = divider.append_axes(\"bottom\", size=\"15%\", pad=0.5) def fit(self, X,", "plot was drawn on. \"\"\" x_vector = self.pca_components_[0] y_vector =", "If False, simply calls ``finalize()`` kwargs : dict Keyword arguments", "PCA as PCATransformer from sklearn.preprocessing import StandardScaler from sklearn.exceptions import", "iris = datasets.load_iris() >>> X = iris.data >>> y =", "iris.data >>> y = iris.target >>> visualizer = PCA() >>>", "used to draw the biplots. classes_ : ndarray, shape (n_classes,)", "z_vector[i] * max_z * 1.05, self.features_[i], color=\"r\", ) else: raise", "axes to project into, either 2d or 3d. To plot", "tuple, default: None A single color to plot all instances", "the more efficient `randomized` solver is enabled. colorbar : bool,", "self.lax.set_xticklabels([]) # Makes the labels centered. self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True) self.lax.set_xticklabels(", "data array projected onto its largest sequential principal components. It", "axes is passed to the visualizer, otherwise one will be", "all instances as or a list of colors to color", "taken from the unique values in y. 
Note that the", "raise YellowbrickValueError( \"heatmap and colorbar are not compatible with 3d", "False Boolean that indicates if the user wants to project", "**kwargs) self.pca_transformer.fit(X) self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_ return self def transform(self, X,", "labels, legends, heatmap, and colorbar as specified by the keyword", "are not compatible with 3d projections\" ) @property def uax(self):", "500x500 and the number of components to extract is lower", "title, labels, legends, heatmap, and colorbar as specified by the", "range_ : (min y, max y) A tuple that describes", "The class labels for each class in y, ordered by", "import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1", "of columns in X, otherwise an exception will be raised", "feature visualization with PCA. \"\"\" ########################################################################## ## Imports ########################################################################## #", "\"\"\" def __init__( self, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False,", "array-like of shape (n, 2) or (n, 3) The matrix", "self.heatmap: raise YellowbrickValueError( \"heatmap and colorbar are not compatible with", "False, simply calls ``finalize()`` kwargs : dict Keyword arguments that", "x m A matrix of n instances with m features.", "the points. Returns ------- self.ax : matplotlib Axes object Returns", "passed to the base class and may influence the visualization", "passes feature names in, those features are used. 
Otherwise the", "YellowbrickValueError( \"heatmap and colorbar are not compatible with 3d projections\"", "########################################################################## ## Quick Method ########################################################################## def pca_decomposition( X, y=None, ax=None,", "Calls the internal `transform` method of the scikit-learn PCA transformer,", "of each feature in the principal components. Also draws a", "can also be optionally included to show the magnitude of", ": list or tuple, default: None A single color to", "is only used in the discrete target type case and", "########################################################################## class PCA(ProjectionVisualizer): \"\"\" Produce a two or three dimensional", "the legend, identifying integer classes or renaming string labels. If", ": ndarray or DataFrame of shape n x m A", "# Axes for heatmap if self._lax is None: self._lax =", "2 or 3 dimensions. If 2 dimensions are selected, a", "ticks=[self.pca_components_.min(), 0, self.pca_components_.max()], ) return self.ax def _draw_projection_features(self, Xp, y):", "solver. If the input X is larger than 500x500 and", "yellowbrick.features.pca # Decomposition based feature visualization with PCA. # #", "a label encoder for the legend, identifying integer classes or", "datasets >>> iris = datasets.load_iris() >>> X = iris.data >>>", "dimensional principal component plot of a data array projected onto", "of each feature value to the component. Parameters ---------- Xp", "scatter plot. The colobar axes is accessible using the cax", "of a data array projected onto its largest sequential principal", "clustered points more visible. 
random_state : int, RandomState instance or", "x=0, y=0, dx=x_vector[i] * max_x, dy=y_vector[i] * max_y, color=\"r\", head_width=0.05,", "colorbar are not compatible with 3d projections\" ) @property def", "string or cmap, default: None The colormap used to create", "colors=colors, colormap=colormap, alpha=alpha, random_state=random_state, colorbar=colorbar, heatmap=heatmap, **kwargs ) # Fit", "y, ordered by sorted class index. These names act as", "NotFittedError ########################################################################## # 2D and 3D PCA Visualizer ########################################################################## class", "2d or 3d. To plot 3d plots with matplotlib, please", "onto its largest sequential principal components. It is common practice", "the colorbar for heatmap and not for the scatter plot.", "return Xp except NotFittedError: raise NotFitted.from_estimator(self, \"transform\") def draw(self, Xp,", "(min y, max y) A tuple that describes the minimum", "plot of a data array projected onto its largest sequential", "load the 3D projection import numpy as np import matplotlib.pyplot", "densely clustered points more visible. random_state : int, RandomState instance", "user wants to scale data. projection : int or string,", "the scatter plot. \"\"\" if self._uax is None: raise AttributeError(\"This", "# division. if self.projection == 3 and self.heatmap: raise YellowbrickValueError(", "Instantiate the visualizer visualizer = PCA( ax=ax, features=features, scale=scale, projection=projection,", ">>> X = iris.data >>> y = iris.target >>> visualizer", "sequential color map based on the range of the target.", ": bool, default: True If the target_type is \"continous\" draw", "draw a colorbar to the right of the scatter plot.", "the number of columns in X, otherwise an exception will", "in other Visualizers. 
Attributes ---------- pca_components_ : ndarray, shape (n_features,", "axes or the layout if ``self.heatmap`` is ``False`` or ``None``.", "The matrix produced by the ``transform()`` method. y : array-like", "a scatter plot. Parameters ---------- X : ndarray or DataFrame", "method of the scikit-learn PCA transformer, which performs a dimensionality", "or cmap, default: None The colormap used to create the", "discrete. This is guaranteed to be strings even if the", "drawn with 3d plots as they do not have permit", "matrix of n instances with m features. y : ndarray", ">>> iris = datasets.load_iris() >>> X = iris.data >>> y", "the target_type is \"continous\" draw a colorbar to the right", "heatmap, adding new axes for the heatmap if necessary and", "pca_decomposition( X, y=None, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None,", "Returns the axes that the scatter plot was drawn on.", "plot. \"\"\" if self._uax is None: raise AttributeError(\"This visualizer does", "features specified by the columns of the input dataset. This", "y = iris.target >>> pca_decomposition(X, y, colors=['r', 'g', 'b'], projection=3)", "legend, identifying integer classes or renaming string labels. If omitted,", "Quick Method ########################################################################## def pca_decomposition( X, y=None, ax=None, features=None, classes=None,", "tells about the magnitude of each feature in the pricipal", "transform the visualizer (calls draw) visualizer.fit(X, y) visualizer.transform(X, y) if", "in X, otherwise an exception will be raised on ``fit()``.", "accessible using lax property and colorbar using uax property. kwargs", "two or three dimensional principal component plot of a data", "heatmap : bool, default: False Add a heatmap showing contribution", "from sklearn.pipeline import Pipeline from sklearn.decomposition import PCA as PCATransformer", "if the target type is discrete. 
This is guaranteed to", "an axes for heatmap\") return self._lax def layout(self, divider=None): \"\"\"", "heatmap can also be optionally included to show the magnitude", "reduction on the input features ``X``. Next calls the ``draw``", "``show()``, which in turn calls ``plt.show()`` however you cannot call", "3D space as a scatter plot. Parameters ---------- X :", "values in y. Note that the length of this list", "\"\"\" super(PCA, self).finalize() self.ax.set_title(\"Principal Component Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if self.projection", ": int, RandomState instance or None, optional (default None) This", "axes for heatmap\") return self._lax def layout(self, divider=None): \"\"\" Creates", "y_vector[i] * max_y], [0, z_vector[i] * max_z], color=\"r\", ) self.ax.text(", "target type is discrete. This is guaranteed to be strings", "visualizer, finally returning a new array of transformed features of", "The target, used to specify the colors of the points.", "(n_classes,) The class labels that define the discrete values in", "used (or generated if required). features : list, default: None", "labels will be taken from the unique values in y.", "(n_features,) The names of the features discovered or used in", "however you cannot call ``plt.savefig`` from this signature, nor ``clear_figure``.", "the layout if ``self.heatmap`` is ``False`` or ``None``. Parameters ----------", "features : list, default: None The names of the features", "property and colorbar using uax property. kwargs : dict Keyword", "for i in range(self.pca_components_.shape[1]): self.ax.arrow( x=0, y=0, dx=x_vector[i] * max_x,", "y. Note that the length of this list must match", "create the individual colors. In the discrete case it is", "self.alpha = alpha # Visual Parameters self.heatmap = heatmap self._uax,", "``plt.show()`` however you cannot call ``plt.savefig`` from this signature, nor", "otherwise an exception is raised. 
This parameter is only used", "= max(Xp[:, 1]) for i in range(self.pca_components_.shape[1]): self.ax.plot( [0, x_vector[i]", "None: self._lax = divider.append_axes(\"bottom\", size=\"15%\", pad=0.5) def fit(self, X, y=None,", ">>> y = iris.target >>> pca_decomposition(X, y, colors=['r', 'g', 'b'],", "colorbar to the right of the scatter plot. The colobar", "to the base class and may influence the visualization as", "applying a PC decomposition. Variable scaling can be controlled using", "that describes the minimum and maximum values in the target.", "color=\"r\", head_width=0.05, width=0.005, ) self.ax.text( x_vector[i] * max_x * 1.05,", "\"\"\" Calls the internal `transform` method of the scikit-learn PCA", "y): \"\"\" Draw the projection of features in the transformed", "2: for i in range(self.pca_components_.shape[1]): self.ax.arrow( x=0, y=0, dx=x_vector[i] *", ": matplotlib Axes object Returns the axes that the scatter", "True the plot will be similar to a biplot. colors", "* max_y, color=\"r\", head_width=0.05, width=0.005, ) self.ax.text( x_vector[i] * max_x", "array. range_ : (min y, max y) A tuple that", ": bool, default: True If True, calls ``show()``, which in", "projections\" ) @property def uax(self): \"\"\" The axes of the", "as a cycle. colormap : string or cmap, default: None", "self.pca_components_[0] y_vector = self.pca_components_[1] max_x = max(Xp[:, 0]) max_y =", "as defined in other Visualizers. Attributes ---------- pca_components_ : ndarray,", "scatter plot. \"\"\" if self._lax is None: raise AttributeError(\"This visualizer", "see LICENSE.txt # # ID: pca.py [] <EMAIL> $ \"\"\"", "labels that define the discrete values in the target. Only", "its largest sequential principal components. It is common practice to", "to access or modify data in X. If a user", "based on the range of the target. alpha : float,", "y, otherwise an exception is raised. 
This parameter is only", "): super(PCA, self).__init__( ax=ax, features=features, classes=classes, colors=colors, colormap=colormap, projection=projection, alpha=alpha,", "dimensional principal component plot of the data array ``X`` projected", "and colorbar using uax property. kwargs : dict Keyword arguments", "is guaranteed to be strings even if the classes are", "width=0.005, ) self.ax.text( x_vector[i] * max_x * 1.05, y_vector[i] *", "on ``fit()``. classes : list, default: None The class labels", "the PCA transformer self.pca_transformer = Pipeline( [ (\"scale\", StandardScaler(with_std=self.scale)), (\"pca\",", "color=\"r\", ) else: raise YellowbrickValueError(\"Projection dimensions must be either 2", "and 0 is completely transparent. This property makes densely clustered", "layout if ``self.heatmap`` is ``False`` or ``None``. Parameters ---------- divider:", "Note that the length of this list must match the", "self.heatmap: if not self.colormap: self.colormap = palettes.DEFAULT_SEQUENCE # TODO: change", "the features discovered or used in the visualizer that can", "im = self.lax.imshow( self.pca_components_, interpolation=\"none\", cmap=self.colormap, aspect=\"auto\", ) plt.colorbar( im,", "using uax property. kwargs : dict Keyword arguments that are", "y) if self.proj_features: # Draws projection features in transformed space.", "colormap used to create the individual colors. In the discrete", "colobar axes is accessible using the cax property. heatmap :", "If a user passes feature names in, those features are", "lax(self): \"\"\" The axes of the heatmap below scatter plot.", "if self.projection == 2: for i in range(self.pca_components_.shape[1]): self.ax.arrow( x=0,", "can be used as an index to access or modify", "the scatter plot was drawn on. \"\"\" x_vector = self.pca_components_[0]", "self.features_[i], color=\"r\", ) elif self.projection == 3: z_vector = self.pca_components_[2]", "either 2d or 3d. 
To plot 3d plots with matplotlib,", "Visualizer ########################################################################## class PCA(ProjectionVisualizer): \"\"\" Produce a two or three", "influence the visualization as defined in other Visualizers. Attributes ----------", "Only available if the target type is discrete. This is", ": ndarray, shape (n_features, n_components) This tells about the magnitude", "of the points. Returns ------- self.ax : matplotlib Axes object", "pricipal components. This is primarily used to draw the biplots.", "visualizer.fit(X, y) visualizer.transform(X, y) if show: visualizer.show() else: visualizer.finalize() #", "if user wants to scale data. projection : int or", "the range of the target. alpha : float, default: 0.75", "component plot of the data array ``X`` projected onto its", "visualizer (calls draw) visualizer.fit(X, y) visualizer.transform(X, y) if show: visualizer.show()", "PCA transformer, which performs a dimensionality reduction on the input", "of target or class values. ax : matplotlib Axes, default:", ": array-like of shape (n,), optional The target, used to", "an exception will be raised on ``fit()``. classes : list,", "max(Xp[:, 1]) if self.projection == 2: for i in range(self.pca_components_.shape[1]):", "self.scale = scale self.proj_features = proj_features # Create the PCA", "the indices of the data array. range_ : (min y,", "float, default: 0.75 Specify a transparency where 1 is completely", "---------- X : ndarray or DataFrame of shape n x", "dimensions must be either 2 or 3\") return self.ax def", "NotFitted.from_estimator(self, \"transform\") def draw(self, Xp, y): \"\"\" Plots a scatterplot", "self).__init__( ax=ax, features=features, classes=classes, colors=colors, colormap=colormap, projection=projection, alpha=alpha, colorbar=colorbar, **kwargs", "specify the colors of the points. 
Returns ------- self.ax :", "from sklearn.decomposition import PCA as PCATransformer from sklearn.preprocessing import StandardScaler", "range of the target. alpha : float, default: 0.75 Specify", "case. If not enough colors per class are specified then", "\"\"\" try: Xp = self.pca_transformer.transform(X) self.draw(Xp, y) return Xp except", "is lower than 80% of the smallest dimension of the", "= self.lax.imshow( self.pca_components_, interpolation=\"none\", cmap=self.colormap, aspect=\"auto\", ) plt.colorbar( im, cax=self.uax,", "that indicates if the user wants to project the features", "\" \"please upgrade matplotlib or set heatmap=False on the visualizer\"", "0 is completely transparent. This property makes densely clustered points", "legends, heatmap, and colorbar as specified by the keyword arguments.", "array ``X`` projected onto its largest sequential principal components. It", "current figure. proj_features : bool, default: False Boolean that indicates", "aspect=\"auto\", ) plt.colorbar( im, cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0, self.pca_components_.max()], )", "specified then the colors are treated as a cycle. colormap", "or (n, 3) The matrix produced by the ``transform()`` method.", "as a label encoder for the legend, identifying integer classes", "the colors of the points. Returns ------- self.ax : matplotlib", "points more visible. random_state : int, RandomState instance or None,", "must import mplot3d to load the 3D projection import numpy", "(default None) This parameter sets the random state on this", "] ) self.alpha = alpha # Visual Parameters self.heatmap =", "otherwise an exception will be raised on ``fit()``. classes :", "feature in the pricipal components. 
This is primarily used to", "projection=projection, alpha=alpha, colorbar=colorbar, **kwargs ) # Data Parameters self.scale =", "class ensures that a colorbar is drawn when target is", "is None: self._uax = divider.append_axes(\"bottom\", size=\"10%\", pad=0.7) # Axes for", "raise AttributeError(\"This visualizer does not have an axes for heatmap\")", "be taken from the unique values in y. Note that", "`randomized` solver is enabled. colorbar : bool, default: True If", "axes for colorbar\") return self._uax @property def lax(self): \"\"\" The", "a transparency where 1 is completely opaque and 0 is", "a DataFrame are used or just simply the indices of", "of components to extract is lower than 80% of the", "Examples -------- >>> from sklearn import datasets >>> iris =", "colormap in the sequential case. If not enough colors per", "the scatter plot. The colobar axes is accessible using the", "max_x * 1.05, y_vector[i] * max_y * 1.05, self.features_[i], color=\"r\",", "sklearn.decomposition import PCA as PCATransformer from sklearn.preprocessing import StandardScaler from", "transformed features of shape ``(len(X), projection)``. \"\"\" try: Xp =", "as np import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import make_axes_locatable", "makes densely clustered points more visible. random_state : int, RandomState", "list must match the number of columns in X, otherwise", "of target or class values. Returns ------- self : visualizer", "in X, then draws the decomposition in either 2D or", "None A single color to plot all instances as or", "signature, nor ``clear_figure``. If False, simply calls ``finalize()`` kwargs :", "3: z_vector = self.pca_components_[2] max_z = max(Xp[:, 1]) for i", "fit to compute features, classes, colors, etc. super(PCA, self).fit(X=X, y=y,", "## Quick Method ########################################################################## def pca_decomposition( X, y=None, ax=None, features=None,", "``False`` or ``None``. 
Parameters ---------- divider: AxesDivider An AxesDivider to", "import PCA as PCATransformer from sklearn.preprocessing import StandardScaler from sklearn.exceptions", "divider.append_axes(\"bottom\", size=\"10%\", pad=0.7) # Axes for heatmap if self._lax is", "# Copyright (C) 2017 The scikit-yb developers # For license", "or class values. Returns ------- Xp : ndarray or DataFrame", "when target is # continuous. super(PCA, self).layout(divider) if self.heatmap: #", "Create the PCA transformer self.pca_transformer = Pipeline( [ (\"scale\", StandardScaler(with_std=self.scale)),", "features, `X`, projected into either 2 or 3 dimensions. If", "self.ax : matplotlib Axes object Returns the axes that the", "Parameters ---------- X : ndarray or DataFrame of shape n", "is the colorbar for heatmap and not for the scatter", "transform(self, X, y=None, **kwargs): \"\"\" Calls the internal `transform` method", "# ID: pca.py [] <EMAIL> $ \"\"\" Decomposition based feature", "# Author: <NAME> # Author: <NAME> # Author: <NAME> #", "scatter plot. super(PCA, self).draw(Xp, y) if self.proj_features: # Draws projection", "are used or just simply the indices of the data", "of the data array. range_ : (min y, max y)", "None: raise AttributeError(\"This visualizer does not have an axes for", "True, calls ``show()``, which in turn calls ``plt.show()`` however you", "## Imports ########################################################################## # NOTE: must import mplot3d to load", "self.projection == 2: if self.heatmap: if not self.colormap: self.colormap =", "parameter sets the random state on this solver. If the", "developers # For license information, see LICENSE.txt # # ID:", "for each class and in the continuous case it is", "if the target type is continuous. Examples -------- >>> from", "alpha : float, default: 0.75 Specify a transparency where 1", "be controlled using the ``scale`` argument. Parameters ---------- ax :", "division. 
if self.projection == 3 and self.heatmap: raise YellowbrickValueError( \"heatmap", "with 3d projections\" ) @property def uax(self): \"\"\" The axes", "self.ax.set_ylabel(\"$PC_2$\") if self.projection == 3: self.ax.set_zlabel(\"$PC_3$\") if self.heatmap == True:", "property. heatmap : bool, default: False Add a heatmap showing", "the data array. range_ : (min y, max y) A", "scale=True, projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, **kwargs", "continuous. super(PCA, self).layout(divider) if self.heatmap: # Axes for colorbar(for heatmap).", "the random state on this solver. If the input X", "1.05, self.features_[i], color=\"r\", ) elif self.projection == 3: z_vector =", "An array or series of target or class values. Returns", "raise AttributeError(\"This visualizer does not have an axes for colorbar\")", "x_vector = self.pca_components_[0] y_vector = self.pca_components_[1] max_x = max(Xp[:, 0])", "pad=0.5) def fit(self, X, y=None, **kwargs): \"\"\" Fits the PCA", "magnitude of each feature value to the component. Parameters ----------", "<reponame>percygautam/yellowbrick # -*- coding: utf-8 -*- # yellowbrick.features.pca # Decomposition", "with PCA. \"\"\" ########################################################################## ## Imports ########################################################################## # NOTE: must", "scale data. projection : int or string, default: 2 The", "PCATransformer(self.projection, random_state=random_state)), ] ) self.alpha = alpha # Visual Parameters", "are treated as a cycle. colormap : string or cmap,", "---------- pca_components_ : ndarray, shape (n_features, n_components) This tells about", "ndarray, shape (n_classes,) The class labels that define the discrete", "Creates the layout for colorbar and heatmap, adding new axes", "discrete target type case and is ignored otherwise. 
scale :", "colorbar : bool, default: True If the target_type is \"continous\"", "upgrade matplotlib or set heatmap=False on the visualizer\" ) )", "a colorbar for readability purpose. The heatmap is accessible using", "draw(self, Xp, y): \"\"\" Plots a scatterplot of points that", "opaque and 0 is completely transparent. This property makes densely", "y) return Xp except NotFittedError: raise NotFitted.from_estimator(self, \"transform\") def draw(self,", "= PCA() >>> visualizer.fit_transform(X, y) >>> visualizer.show() \"\"\" def __init__(", "target. Only available if the target type is continuous. Examples", "the input X is larger than 500x500 and the number", "n_components) This tells about the magnitude of each feature in", "= Pipeline( [ (\"scale\", StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection, random_state=random_state)), ] )", "default: False Add a heatmap showing contribution of each feature", "the visualizer object. return visualizer # Alias for PCA PCADecomposition", "each feature in the pricipal components. This is primarily used", "using lax property and colorbar using uax property. kwargs :", "simply the indices of the data array. range_ : (min", "the data array ``X`` before applying a PC decomposition. Variable", ") self.ax.text( x_vector[i] * max_x * 1.05, y_vector[i] * max_y", "the heatmap if necessary and modifying the aspect ratio. Does", "0]) max_y = max(Xp[:, 1]) if self.projection == 2: for", "random_state=None, colorbar=True, heatmap=False, show=True, **kwargs ): \"\"\" Produce a two", "self).layout(divider) if self.heatmap: # Axes for colorbar(for heatmap). if self._uax", "series of target or class values. ax : matplotlib Axes,", "in the projected space. If True the plot will be", "random_state=None, colorbar=True, heatmap=False, **kwargs ): super(PCA, self).__init__( ax=ax, features=features, classes=classes,", "a colorbar is drawn when target is # continuous. 
super(PCA,", ": ndarray, shape (n_classes,) The class labels that define the", "have an axes for colorbar\") return self._uax @property def lax(self):", "Fits the PCA transformer, transforms the data in X, then", "i in range(self.pca_components_.shape[1]): self.ax.arrow( x=0, y=0, dx=x_vector[i] * max_x, dy=y_vector[i]", "ax : matplotlib Axes, default: None The axes to plot", "self.pca_components_.max()], ) return self.ax def _draw_projection_features(self, Xp, y): \"\"\" Draw", "and in the continuous case it is used to create", "An AxesDivider to be passed among all layout calls. \"\"\"", "data array. range_ : (min y, max y) A tuple", "of shape (n,), optional The target, used to specify the", "string, default: 2 The number of axes to project into,", "colors. In the discrete case it is used to compute", "the discrete case or as an ordered colormap in the", "is ignored otherwise. scale : bool, default: True Boolean that", ": string or cmap, default: None The colormap used to", "the axes or the layout if ``self.heatmap`` is ``False`` or", "number of columns in X, otherwise an exception will be", "X, then draws the decomposition in either 2D or 3D", "will be raised on ``fit()``. classes : list, default: None", "cmap=self.colormap, aspect=\"auto\", ) plt.colorbar( im, cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0, self.pca_components_.max()],", "using lax property and colorbar using uax property. show :", "principal component plot of a data array projected onto its", "type is continuous. 
Examples -------- >>> from sklearn import datasets", "colorbar=colorbar, **kwargs ) # Data Parameters self.scale = scale self.proj_features", "self.pca_components_, interpolation=\"none\", cmap=self.colormap, aspect=\"auto\", ) plt.colorbar( im, cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(),", "AttributeError(\"This visualizer does not have an axes for heatmap\") return", "self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if self.projection == 3: self.ax.set_zlabel(\"$PC_3$\") if self.heatmap ==", "import ProjectionVisualizer from yellowbrick.exceptions import YellowbrickValueError, NotFitted from sklearn.pipeline import", "self._uax @property def lax(self): \"\"\" The axes of the heatmap", "numpy as np import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import", "the current figure. proj_features : bool, default: False Boolean that", "self.pca_transformer.transform(X) self.draw(Xp, y) return Xp except NotFittedError: raise NotFitted.from_estimator(self, \"transform\")", "yellowbrick.exceptions import YellowbrickValueError, NotFitted from sklearn.pipeline import Pipeline from sklearn.decomposition", "Decomposition based feature visualization with PCA. # # Author: <NAME>", "the discrete values in the target. Only available if the", "projection=3) \"\"\" # Instantiate the visualizer visualizer = PCA( ax=ax,", "-*- coding: utf-8 -*- # yellowbrick.features.pca # Decomposition based feature", "data. projection : int or string, default: 2 The number", "the biplots. classes_ : ndarray, shape (n_classes,) The class labels", "the scatter plot was drawn on. \"\"\" # Call to", "# continuous. super(PCA, self).layout(divider) if self.heatmap: # Axes for colorbar(for", "random_state=random_state, colorbar=colorbar, heatmap=heatmap, **kwargs ) # Fit and transform the", "may influence the visualization as defined in other Visualizers. 
Attributes", "match the number of columns in X, otherwise an exception", "where 1 is completely opaque and 0 is completely transparent.", "or series of target or class values. Returns ------- self", ">>> visualizer.fit_transform(X, y) >>> visualizer.show() \"\"\" def __init__( self, ax=None,", "alpha=alpha, colorbar=colorbar, **kwargs ) # Data Parameters self.scale = scale", "be strings even if the classes are a different type.", "among all layout calls. \"\"\" # Ensure matplotlib version compatibility", ": matplotlib Axes, default: None The axes to plot the", "self.ax.arrow( x=0, y=0, dx=x_vector[i] * max_x, dy=y_vector[i] * max_y, color=\"r\",", "of a DataFrame are used or just simply the indices", "to extract is lower than 80% of the smallest dimension", "the right of the scatter plot. The colobar axes is", "size=\"10%\", pad=0.7) # Axes for heatmap if self._lax is None:", "using the ``scale`` argument. Parameters ---------- X : ndarray or", "(\"pca\", PCATransformer(self.projection, random_state=random_state)), ] ) self.alpha = alpha # Visual", "Created: Tue May 23 18:34:27 2017 -0400 # # Copyright", "colors are treated as a cycle. colormap : string or", "scale the data array ``X`` before applying a PC decomposition.", "plot of the data array ``X`` projected onto its largest", "or class values. ax : matplotlib Axes, default: None The", "as plt from mpl_toolkits.axes_grid1 import make_axes_locatable from yellowbrick.style import palettes", "is enabled. colorbar : bool, default: True If the target_type", "right of the scatter plot. The colobar axes is accessible", "n instances with m features. y : ndarray or Series", "its class in the discrete case or as an ordered", "the continuous case it is used to create a sequential", "@property def uax(self): \"\"\" The axes of the colorbar, bottom", "target, used to specify the colors of the points. Returns", "completely opaque and 0 is completely transparent. This property makes", "all layout calls. 
\"\"\" # Ensure matplotlib version compatibility if", "return self def transform(self, X, y=None, **kwargs): \"\"\" Calls the", "the number of colors needed for each class and in", "coding: utf-8 -*- # yellowbrick.features.pca # Decomposition based feature visualization", "True Boolean that indicates if user wants to scale data.", "self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\", fontsize=10) self.fig.tight_layout() ########################################################################## ## Quick Method ##########################################################################", "projection=projection, proj_features=proj_features, colors=colors, colormap=colormap, alpha=alpha, random_state=random_state, colorbar=colorbar, heatmap=heatmap, **kwargs )", "heatmap can be drawn with 3d plots as they do", "three dimensional principal component plot of the data array ``X``", "None: divider = make_axes_locatable(self.ax) # Call to super class ensures", "which in turn calls ``plt.show()`` however you cannot call ``plt.savefig``", "self.proj_features: # Draws projection features in transformed space. self._draw_projection_features(Xp, y)", "array or series of target or class values. ax :", "# Call super fit to compute features, classes, colors, etc.", "the discrete case it is used to compute the number", "divider=None): \"\"\" Creates the layout for colorbar and heatmap, adding", "single color to plot all instances as or a list", "if self.proj_features: # Draws projection features in transformed space. 
self._draw_projection_features(Xp,", "size=\"15%\", pad=0.5) def fit(self, X, y=None, **kwargs): \"\"\" Fits the", "random_state : int, RandomState instance or None, optional (default None)", "of this list must match the number of unique values", "from sklearn import datasets >>> iris = datasets.load_iris() >>> X", "Fit and transform the visualizer (calls draw) visualizer.fit(X, y) visualizer.transform(X,", "# Create the new axes for the colorbar and heatmap", "value to the component. Parameters ---------- Xp : array-like of", "max_y, color=\"r\", head_width=0.05, width=0.005, ) self.ax.text( x_vector[i] * max_x *", "self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_ return self def transform(self, X, y=None, **kwargs):", "Xp except NotFittedError: raise NotFitted.from_estimator(self, \"transform\") def draw(self, Xp, y):", "using the cax property. heatmap : bool, default: False Add", "range(self.pca_components_.shape[1]): self.ax.arrow( x=0, y=0, dx=x_vector[i] * max_x, dy=y_vector[i] * max_y,", "return self._lax def layout(self, divider=None): \"\"\" Creates the layout for", "YellowbrickValueError, NotFitted from sklearn.pipeline import Pipeline from sklearn.decomposition import PCA", "a heatmap showing contribution of each feature in the principal", "dataset. This length of this list must match the number", "Parameters self.heatmap = heatmap self._uax, self._lax = None, None #", "PCA transformer self.pca_transformer = Pipeline( [ (\"scale\", StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection,", "max y) A tuple that describes the minimum and maximum", "contribution of each feature in the principal components. Also draws", "# Makes the labels centered. self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True) self.lax.set_xticklabels( self.features_,", "with matplotlib, please ensure a 3d axes is passed to", "features are used. 
Otherwise the columns of a DataFrame are", "indicates if user wants to scale data. projection : int", "is None: raise YellowbrickValueError( ( \"heatmap requires matplotlib 2.0.2 or", "self.ax.plot( [0, x_vector[i] * max_x], [0, y_vector[i] * max_y], [0,", "<NAME> # Created: Tue May 23 18:34:27 2017 -0400 #", "Returns self for use in Pipelines. \"\"\" # Call super", "@property def lax(self): \"\"\" The axes of the heatmap below", "scatter plot. This is the colorbar for heatmap and not", "colormap=colormap, alpha=alpha, random_state=random_state, colorbar=colorbar, heatmap=heatmap, **kwargs ) # Fit and", "the visualizer, otherwise one will be created using the current", "must match the number of unique values in y, otherwise", "the target type is discrete. This is guaranteed to be", "drawn on. \"\"\" x_vector = self.pca_components_[0] y_vector = self.pca_components_[1] max_x", "visualizer.transform(X, y) if show: visualizer.show() else: visualizer.finalize() # Returns the", "pca.py [] <EMAIL> $ \"\"\" Decomposition based feature visualization with", "the target. Only available if the target type is discrete.", "columns of a DataFrame are used or just simply the", "im, cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0, self.pca_components_.max()], ) return self.ax def", "The names of the features specified by the columns of", "axes of the colorbar, bottom of scatter plot. This is", "visualizer.show() else: visualizer.finalize() # Returns the visualizer object. return visualizer", "tuple that describes the minimum and maximum values in the", "license information, see LICENSE.txt # # ID: pca.py [] <EMAIL>", "scale=scale, projection=projection, proj_features=proj_features, colors=colors, colormap=colormap, alpha=alpha, random_state=random_state, colorbar=colorbar, heatmap=heatmap, **kwargs", "string labels. If omitted, the class labels will be taken", "**kwargs): \"\"\" Fits the PCA transformer, transforms the data in", "or 3d. 
To plot 3d plots with matplotlib, please ensure", "feature visualization with PCA. # # Author: <NAME> # Author:", "layout for colorbar and heatmap, adding new axes for the", "then the more efficient `randomized` solver is enabled. colorbar :", "if self.projection == 3: self.ax.set_zlabel(\"$PC_3$\") if self.heatmap == True: self.lax.set_xticks(np.arange(-0.5,", "# # Author: <NAME> # Author: <NAME> # Author: <NAME>", "ndarray or DataFrame of shape n x m Returns a", "unique values in y. Note that the length of this", "NotFitted from sklearn.pipeline import Pipeline from sklearn.decomposition import PCA as", "the 3D projection import numpy as np import matplotlib.pyplot as", "This is guaranteed to be strings even if the classes", "projected onto its largest sequential principal components. It is common", "user passes feature names in, those features are used. Otherwise", "a new array of transformed features of shape ``(len(X), projection)``.", "from the unique values in y. Note that the length", "colorbar for heatmap and not for the scatter plot. \"\"\"", "data in X. If a user passes feature names in,", "using the ``scale`` argument. Parameters ---------- ax : matplotlib Axes,", "Data Parameters self.scale = scale self.proj_features = proj_features # Create", "aspect ratio. 
Does not modify the axes or the layout", "head_width=0.05, width=0.005, ) self.ax.text( x_vector[i] * max_x * 1.05, y_vector[i]", "The number of axes to project into, either 2d or", "= self.pca_components_[2] max_z = max(Xp[:, 1]) for i in range(self.pca_components_.shape[1]):", "of points that represented the decomposition, `pca_features_`, of the original", "selected, a colorbar and heatmap can also be optionally included", "# Instantiate the visualizer visualizer = PCA( ax=ax, features=features, scale=scale,", "self.lax.set_xticklabels( self.features_, rotation=90, fontsize=12, minor=True ) self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"],", "``plt.savefig`` from this signature, nor ``clear_figure``. If False, simply calls", "ordered by sorted class index. These names act as a", "The colormap used to create the individual colors. In the", "of the target. alpha : float, default: 0.75 Specify a", "if self._uax is None: self._uax = divider.append_axes(\"bottom\", size=\"10%\", pad=0.7) #", "**kwargs): \"\"\" Calls the internal `transform` method of the scikit-learn", "shape (n, 2) or (n, 3) The matrix produced by", "change to pcolormesh instead of imshow per #615 spec im", "visualizer that can be used as an index to access", "class labels will be taken from the unique values in", "\"\"\" if self._uax is None: raise AttributeError(\"This visualizer does not", "import datasets >>> iris = datasets.load_iris() >>> X = iris.data", "not self.colormap: self.colormap = palettes.DEFAULT_SEQUENCE # TODO: change to pcolormesh", "if self._uax is None: raise AttributeError(\"This visualizer does not have", ": array-like of shape (n, 2) or (n, 3) The", "type. features_ : ndarray, shape (n_features,) The names of the", "\"\"\" # Ensure matplotlib version compatibility if make_axes_locatable is None:", "A matrix of n instances with m features. 
y :", ") # Create the new axes for the colorbar and", "plot all instances as or a list of colors to", "ProjectionVisualizer from yellowbrick.exceptions import YellowbrickValueError, NotFitted from sklearn.pipeline import Pipeline", "input dataset. This length of this list must match the", "matplotlib version compatibility if make_axes_locatable is None: raise YellowbrickValueError( (", "heatmap if self._lax is None: self._lax = divider.append_axes(\"bottom\", size=\"15%\", pad=0.5)", "lax property and colorbar using uax property. show : bool,", "a PC decomposition. Variable scaling can be controlled using the", "default: None The colormap used to create the individual colors.", "accessible using the cax property. heatmap : bool, default: False", "of the features discovered or used in the visualizer that", "if make_axes_locatable is None: raise YellowbrickValueError( ( \"heatmap requires matplotlib", "raise YellowbrickValueError(\"Projection dimensions must be either 2 or 3\") return", "be used as an index to access or modify data", "type case and is ignored otherwise. scale : bool, default:", "else: raise YellowbrickValueError(\"Projection dimensions must be either 2 or 3\")", "Author: <NAME> # Author: <NAME> # Author: <NAME> # Created:", "= divider.append_axes(\"bottom\", size=\"15%\", pad=0.5) def fit(self, X, y=None, **kwargs): \"\"\"", "is primarily used to draw the biplots. classes_ : ndarray,", "just simply the indices of the data array. range_ :", "the original features, `X`, projected into either 2 or 3", "of features in the transformed space. 
Parameters ---------- Xp :", "x_vector[i] * max_x * 1.05, y_vector[i] * max_y * 1.05,", "self).fit(X=X, y=y, **kwargs) self.pca_transformer.fit(X) self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_ return self def", "80% of the smallest dimension of the data, then the", "x_vector[i] * max_x], [0, y_vector[i] * max_y], [0, z_vector[i] *", "y) A tuple that describes the minimum and maximum values", "import palettes from yellowbrick.features.projection import ProjectionVisualizer from yellowbrick.exceptions import YellowbrickValueError,", "########################################################################## # 2D and 3D PCA Visualizer ########################################################################## class PCA(ProjectionVisualizer):", "object Returns the axes that the scatter plot was drawn", "treated as a cycle. colormap : string or cmap, default:", "dy=y_vector[i] * max_y, color=\"r\", head_width=0.05, width=0.005, ) self.ax.text( x_vector[i] *", "# Ensure matplotlib version compatibility if make_axes_locatable is None: raise", "shape (n_classes,) The class labels that define the discrete values", "the data, then the more efficient `randomized` solver is enabled.", "show: visualizer.show() else: visualizer.finalize() # Returns the visualizer object. return", "The axes of the colorbar, bottom of scatter plot. This", "self.colormap: self.colormap = palettes.DEFAULT_SEQUENCE # TODO: change to pcolormesh instead", "(C) 2017 The scikit-yb developers # For license information, see", "that the scatter plot was drawn on. \"\"\" x_vector =", "self def transform(self, X, y=None, **kwargs): \"\"\" Calls the internal", "bool, default: False Boolean that indicates if the user wants", "or modify data in X. 
If a user passes feature", "colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, **kwargs ): super(PCA, self).__init__( ax=ax,", "for the heatmap if necessary and modifying the aspect ratio.", "property makes densely clustered points more visible. random_state : int,", "than 500x500 and the number of components to extract is", "y_vector = self.pca_components_[1] max_x = max(Xp[:, 0]) max_y = max(Xp[:,", "2017 The scikit-yb developers # For license information, see LICENSE.txt", "to draw the biplots. classes_ : ndarray, shape (n_classes,) The", "instances with m features. y : ndarray or Series of", "visualizer.fit_transform(X, y) >>> visualizer.show() \"\"\" def __init__( self, ax=None, features=None,", "PCA transformer, transforms the data in X, then draws the", "import Pipeline from sklearn.decomposition import PCA as PCATransformer from sklearn.preprocessing", "raise NotFitted.from_estimator(self, \"transform\") def draw(self, Xp, y): \"\"\" Plots a", "== 2: if self.heatmap: if not self.colormap: self.colormap = palettes.DEFAULT_SEQUENCE", "* 1.05, y_vector[i] * max_y * 1.05, z_vector[i] * max_z", "This parameter sets the random state on this solver. If", "to super class ensures that a colorbar is drawn when", "that a colorbar is drawn when target is # continuous.", "\"\"\" # Call to super draw which draws the scatter", "ax=ax, features=features, scale=scale, projection=projection, proj_features=proj_features, colors=colors, colormap=colormap, alpha=alpha, random_state=random_state, colorbar=colorbar,", "projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, **kwargs ):", "1.05, y_vector[i] * max_y * 1.05, z_vector[i] * max_z *", "columns of the input dataset. This length of this list", "with m features. 
y : ndarray or Series of length", "self.colormap = palettes.DEFAULT_SEQUENCE # TODO: change to pcolormesh instead of", "def _draw_projection_features(self, Xp, y): \"\"\" Draw the projection of features", "Returns ------- self.ax : matplotlib Axes object Returns the axes", "scatter plot was drawn on. \"\"\" # Call to super", "imshow per #615 spec im = self.lax.imshow( self.pca_components_, interpolation=\"none\", cmap=self.colormap,", "principal components. Also draws a colorbar for readability purpose. The", "biplot. colors : list or tuple, default: None A single", "enabled. colorbar : bool, default: True If the target_type is", "features ``X``. Next calls the ``draw`` method of the Yellowbrick", "Draws projection features in transformed space. self._draw_projection_features(Xp, y) if self.projection", "X, otherwise an exception will be raised on ``fit()``. classes", "will be taken from the unique values in y. Note", "the transformed space. Parameters ---------- Xp : array-like of shape", "(n,), optional The target, used to specify the colors of", "each class and in the continuous case it is used", "purpose. The heatmap is accessible using lax property and colorbar", "list must match the number of unique values in y,", "for the colorbar and heatmap if divider is None: divider", "return self.ax def finalize(self, **kwargs): \"\"\" Draws the title, labels,", "int, RandomState instance or None, optional (default None) This parameter", "# Create the PCA transformer self.pca_transformer = Pipeline( [ (\"scale\",", "axes is accessible using the cax property. heatmap : bool,", "rotation=90, fontsize=12, minor=True ) self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\", fontsize=10)", "self._lax def layout(self, divider=None): \"\"\" Creates the layout for colorbar", "the visualizer\" ) ) # Create the new axes for", "the discrete target type case and is ignored otherwise. 
scale", "new array-like object of transformed features of shape ``(len(X), projection)``.", "is None: divider = make_axes_locatable(self.ax) # Call to super class", "This is primarily used to draw the biplots. classes_ :", "= datasets.load_iris() >>> X = iris.data >>> y = iris.target", "colors needed for each class and in the continuous case", "Ensure matplotlib version compatibility if make_axes_locatable is None: raise YellowbrickValueError(", "finalize(self, **kwargs): \"\"\" Draws the title, labels, legends, heatmap, and", "0.75 Specify a transparency where 1 is completely opaque and", "sorted class index. These names act as a label encoder", "of the features specified by the columns of the input", "features discovered or used in the visualizer that can be", "spec im = self.lax.imshow( self.pca_components_, interpolation=\"none\", cmap=self.colormap, aspect=\"auto\", ) plt.colorbar(", "X is larger than 500x500 and the number of components", "array-like of shape (n,), optional The target, used to specify", "as an ordered colormap in the sequential case. If not", "in turn calls ``plt.show()`` however you cannot call ``plt.savefig`` from", "transformer, transforms the data in X, then draws the decomposition", "calls. \"\"\" # Ensure matplotlib version compatibility if make_axes_locatable is", "def draw(self, Xp, y): \"\"\" Plots a scatterplot of points", "1.05, z_vector[i] * max_z * 1.05, self.features_[i], color=\"r\", ) else:", "property and colorbar using uax property. show : bool, default:", "a cycle. 
colormap : string or cmap, default: None The", "a two or three dimensional principal component plot of the", "classes=classes, colors=colors, colormap=colormap, projection=projection, alpha=alpha, colorbar=colorbar, **kwargs ) # Data", "heatmap\") return self._lax def layout(self, divider=None): \"\"\" Creates the layout", "(n, 2) or (n, 3) The matrix produced by the", "and transform the visualizer (calls draw) visualizer.fit(X, y) visualizer.transform(X, y)", "import StandardScaler from sklearn.exceptions import NotFittedError ########################################################################## # 2D and", "on the input features ``X``. Next calls the ``draw`` method", "Visual Parameters self.heatmap = heatmap self._uax, self._lax = None, None", "[0, x_vector[i] * max_x], [0, y_vector[i] * max_y], [0, z_vector[i]", "proj_features # Create the PCA transformer self.pca_transformer = Pipeline( [", "show : bool, default: True If True, calls ``show()``, which", "requires matplotlib 2.0.2 or greater \" \"please upgrade matplotlib or", "heatmap=False on the visualizer\" ) ) # Create the new", "divider: AxesDivider An AxesDivider to be passed among all layout", "########################################################################## ## Imports ########################################################################## # NOTE: must import mplot3d to", "Attributes ---------- pca_components_ : ndarray, shape (n_features, n_components) This tells", "False Add a heatmap showing contribution of each feature in", "modify the axes or the layout if ``self.heatmap`` is ``False``", "not have an axes for heatmap\") return self._lax def layout(self,", "centered. self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True) self.lax.set_xticklabels( self.features_, rotation=90, fontsize=12, minor=True )", "on this solver. If the input X is larger than", "sets the random state on this solver. 
If the input", "Xp, y): \"\"\" Plots a scatterplot of points that represented", "and is ignored otherwise. scale : bool, default: True Boolean", "layout calls. \"\"\" # Ensure matplotlib version compatibility if make_axes_locatable", "kwargs : dict Keyword arguments that are passed to the", "divider.append_axes(\"bottom\", size=\"15%\", pad=0.5) def fit(self, X, y=None, **kwargs): \"\"\" Fits", "if required). features : list, default: None The names of", "If True the plot will be similar to a biplot.", "specified by the keyword arguments. \"\"\" super(PCA, self).finalize() self.ax.set_title(\"Principal Component", "default: True Boolean that indicates if user wants to scale", "this solver. If the input X is larger than 500x500", "target type case and is ignored otherwise. scale : bool,", "# Axes for colorbar(for heatmap). if self._uax is None: self._uax", "the visualizer (calls draw) visualizer.fit(X, y) visualizer.transform(X, y) if show:", ">>> visualizer = PCA() >>> visualizer.fit_transform(X, y) >>> visualizer.show() \"\"\"", "Call to super draw which draws the scatter plot. super(PCA,", "**kwargs ): \"\"\" Produce a two or three dimensional principal", "of shape (n, 2) or (n, 3) The matrix produced", "RandomState instance or None, optional (default None) This parameter sets", "This length of this list must match the number of", "If not enough colors per class are specified then the", "number of colors needed for each class and in the", "target or class values. Returns ------- Xp : ndarray or", "* max_y * 1.05, z_vector[i] * max_z * 1.05, self.features_[i],", "target or class values. ax : matplotlib Axes, default: None", "These names act as a label encoder for the legend,", "from this signature, nor ``clear_figure``. If False, simply calls ``finalize()``", "modify data in X. If a user passes feature names", "list or tuple, default: None A single color to plot", ") ) # Create the new axes for the colorbar", "\"\"\" The axes of the heatmap below scatter plot. 
\"\"\"", "heatmap and not for the scatter plot. \"\"\" if self._uax", "\"continous\" draw a colorbar to the right of the scatter", "class labels that define the discrete values in the target.", "to project the features in the projected space. If True", "default: False Boolean that indicates if the user wants to", "\"\"\" Draws the title, labels, legends, heatmap, and colorbar as", "= make_axes_locatable(self.ax) # Call to super class ensures that a", "that are passed to the base class and may influence", "# yellowbrick.features.pca # Decomposition based feature visualization with PCA. #", "each feature value to the component. Parameters ---------- Xp :", "the target type is continuous. Examples -------- >>> from sklearn", "max_y], [0, z_vector[i] * max_z], color=\"r\", ) self.ax.text( x_vector[i] *", "smallest dimension of the data, then the more efficient `randomized`", "that the length of this list must match the number", "from sklearn.preprocessing import StandardScaler from sklearn.exceptions import NotFittedError ########################################################################## #", "-0400 # # Copyright (C) 2017 The scikit-yb developers #", "length of this list must match the number of columns", "proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, **kwargs ): super(PCA,", "array of transformed features of shape ``(len(X), projection)``. Parameters ----------", "transformer self.pca_transformer = Pipeline( [ (\"scale\", StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection, random_state=random_state)),", "transforms the data in X, then draws the decomposition in", "A single color to plot all instances as or a", ": bool, default: False Boolean that indicates if the user", "for the scatter plot. \"\"\" if self._uax is None: raise", ": float, default: 0.75 Specify a transparency where 1 is", "color to plot all instances as or a list of", "into, either 2d or 3d. 
To plot 3d plots with", "the cax property. heatmap : bool, default: False Add a", "ndarray or DataFrame of shape n x m A matrix", "default: None The axes to plot the figure on. If", "calls ``plt.show()`` however you cannot call ``plt.savefig`` from this signature,", "cax property. heatmap : bool, default: False Add a heatmap", "of axes to project into, either 2d or 3d. To", "colorbar=True, heatmap=False, **kwargs ): super(PCA, self).__init__( ax=ax, features=features, classes=classes, colors=colors,", "labels. If omitted, the class labels will be taken from", "Xp : array-like of shape (n, 2) or (n, 3)", "an ordered colormap in the sequential case. If not enough", "# Fit and transform the visualizer (calls draw) visualizer.fit(X, y)", "2017 -0400 # # Copyright (C) 2017 The scikit-yb developers", "generated if required). features : list, default: None The names", "names in, those features are used. Otherwise the columns of", "in transformed space. self._draw_projection_features(Xp, y) if self.projection == 2: if", "only used in the discrete target type case and is", "columns in X, otherwise an exception will be raised on", "\"\"\" Decomposition based feature visualization with PCA. \"\"\" ########################################################################## ##", "y=None, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75,", "If the target_type is \"continous\" draw a colorbar to the", "self.ax def _draw_projection_features(self, Xp, y): \"\"\" Draw the projection of", "visualizer object. return visualizer # Alias for PCA PCADecomposition =", "scatter plot was drawn on. 
\"\"\" x_vector = self.pca_components_[0] y_vector", "dimensions are selected, a colorbar and heatmap can also be", "a scatterplot of points that represented the decomposition, `pca_features_`, of", "import YellowbrickValueError, NotFitted from sklearn.pipeline import Pipeline from sklearn.decomposition import", "* max_z], color=\"r\", ) self.ax.text( x_vector[i] * max_x * 1.05,", "case and is ignored otherwise. scale : bool, default: True", "are a different type. features_ : ndarray, shape (n_features,) The", "more visible. random_state : int, RandomState instance or None, optional", "completely transparent. This property makes densely clustered points more visible.", "== 3: self.ax.set_zlabel(\"$PC_3$\") if self.heatmap == True: self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([])", "class values. Returns ------- Xp : ndarray or DataFrame of", "If 2 dimensions are selected, a colorbar and heatmap can", "exception is raised. This parameter is only used in the", "= self.pca_components_[1] max_x = max(Xp[:, 0]) max_y = max(Xp[:, 1])", "scale : bool, default: True Boolean that indicates if user", "default: 0.75 Specify a transparency where 1 is completely opaque", "(\"scale\", StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection, random_state=random_state)), ] ) self.alpha = alpha", "not for the scatter plot. \"\"\" if self._uax is None:", "# No heatmap can be drawn with 3d plots as", "iris.target >>> pca_decomposition(X, y, colors=['r', 'g', 'b'], projection=3) \"\"\" #", "self.projection == 3: z_vector = self.pca_components_[2] max_z = max(Xp[:, 1])", "This parameter is only used in the discrete target type", "max(Xp[:, 1]) for i in range(self.pca_components_.shape[1]): self.ax.plot( [0, x_vector[i] *", "Draw the projection of features in the transformed space. 
Parameters", "matplotlib or set heatmap=False on the visualizer\" ) ) #", "def layout(self, divider=None): \"\"\" Creates the layout for colorbar and", "the figure on. If None is passed in, the current", "# Decomposition based feature visualization with PCA. # # Author:", "defined in other Visualizers. Attributes ---------- pca_components_ : ndarray, shape", "visualization as defined in other Visualizers. Attributes ---------- pca_components_ :", "A tuple that describes the minimum and maximum values in", "palettes.DEFAULT_SEQUENCE # TODO: change to pcolormesh instead of imshow per", "def finalize(self, **kwargs): \"\"\" Draws the title, labels, legends, heatmap,", "for each class in y, ordered by sorted class index.", "plot the figure on. If None is passed in, the", "of transformed features of shape ``(len(X), projection)``. Parameters ---------- X", "StandardScaler from sklearn.exceptions import NotFittedError ########################################################################## # 2D and 3D", "a biplot. colors : list or tuple, default: None A", "Parameters self.scale = scale self.proj_features = proj_features # Create the", "== True: self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([]) # Makes the labels centered.", "self._lax is None: raise AttributeError(\"This visualizer does not have an", "ensure a 3d axes is passed to the visualizer, otherwise", "DataFrame are used or just simply the indices of the", "matrix produced by the ``transform()`` method. y : array-like of", "in the transformed space. Parameters ---------- Xp : array-like of", "on the visualizer\" ) ) # Create the new axes", "they do not have permit axes # division. 
if self.projection", "[0, y_vector[i] * max_y], [0, z_vector[i] * max_z], color=\"r\", )", "class PCA(ProjectionVisualizer): \"\"\" Produce a two or three dimensional principal", "must match the number of columns in X, otherwise an", "To plot 3d plots with matplotlib, please ensure a 3d", "DataFrame of shape n x m A matrix of n", "features of shape ``(len(X), projection)``. \"\"\" try: Xp = self.pca_transformer.transform(X)", "(n, 3) The matrix produced by the ``transform()`` method. y", "a different type. features_ : ndarray, shape (n_features,) The names", "color map based on the range of the target. alpha", "18:34:27 2017 -0400 # # Copyright (C) 2017 The scikit-yb", "three dimensional principal component plot of a data array projected", "plot 3d plots with matplotlib, please ensure a 3d axes", "proj_features : bool, default: False Boolean that indicates if the", "Returns ------- Xp : ndarray or DataFrame of shape n", "as a scatter plot. Parameters ---------- X : ndarray or", "space as a scatter plot. Parameters ---------- X : ndarray", "target or class values. Returns ------- self : visualizer Returns", ": bool, default: False Add a heatmap showing contribution of", "to its class in the discrete case or as an", "colormap=colormap, projection=projection, alpha=alpha, colorbar=colorbar, **kwargs ) # Data Parameters self.scale", "new axes for the heatmap if necessary and modifying the", "in y, ordered by sorted class index. These names act", "or series of target or class values. ax : matplotlib", "matplotlib Axes object Returns the axes that the scatter plot", "you cannot call ``plt.savefig`` from this signature, nor ``clear_figure``. If", "the magnitude of each feature value to the component. 
Parameters", "The heatmap is accessible using lax property and colorbar using", "= proj_features # Create the PCA transformer self.pca_transformer = Pipeline(", "pca_decomposition(X, y, colors=['r', 'g', 'b'], projection=3) \"\"\" # Instantiate the", "continuous case it is used to create a sequential color", "arguments that are passed to the base class and may", "interpolation=\"none\", cmap=self.colormap, aspect=\"auto\", ) plt.colorbar( im, cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0,", "is completely transparent. This property makes densely clustered points more", "also be optionally included to show the magnitude of each", "and heatmap if divider is None: divider = make_axes_locatable(self.ax) #", "that can be used as an index to access or", "make_axes_locatable is None: raise YellowbrickValueError( ( \"heatmap requires matplotlib 2.0.2", "in the target. Only available if the target type is", "discrete case or as an ordered colormap in the sequential", "series of target or class values. Returns ------- self :", "calls the ``draw`` method of the Yellowbrick visualizer, finally returning", "-------- >>> from sklearn import datasets >>> iris = datasets.load_iris()", ">>> from sklearn import datasets >>> iris = datasets.load_iris() >>>", "class are specified then the colors are treated as a", "for i in range(self.pca_components_.shape[1]): self.ax.plot( [0, x_vector[i] * max_x], [0,", "to a biplot. colors : list or tuple, default: None", "The scikit-yb developers # For license information, see LICENSE.txt #", "== 3 and self.heatmap: raise YellowbrickValueError( \"heatmap and colorbar are", "* max_x], [0, y_vector[i] * max_y], [0, z_vector[i] * max_z],", "the features specified by the columns of the input dataset.", "otherwise one will be created using the current figure. proj_features", "colorbar and heatmap if divider is None: divider = make_axes_locatable(self.ax)", "``transform()`` method. 
y : array-like of shape (n,), optional The", "---------- divider: AxesDivider An AxesDivider to be passed among all", "color=\"r\", ) self.ax.text( x_vector[i] * max_x * 1.05, y_vector[i] *", "classes_ : ndarray, shape (n_classes,) The class labels that define", "y) if self.projection == 2: if self.heatmap: if not self.colormap:", "similar to a biplot. colors : list or tuple, default:", "projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, show=True, **kwargs", "def fit(self, X, y=None, **kwargs): \"\"\" Fits the PCA transformer,", "Xp = self.pca_transformer.transform(X) self.draw(Xp, y) return Xp except NotFittedError: raise", "True If the target_type is \"continous\" draw a colorbar to", "used. Otherwise the columns of a DataFrame are used or", "None The colormap used to create the individual colors. In", "adding new axes for the heatmap if necessary and modifying", "by the columns of the input dataset. This length of", "AxesDivider An AxesDivider to be passed among all layout calls.", "iris.data >>> y = iris.target >>> pca_decomposition(X, y, colors=['r', 'g',", "Axes for heatmap if self._lax is None: self._lax = divider.append_axes(\"bottom\",", "two or three dimensional principal component plot of the data", "a dimensionality reduction on the input features ``X``. Next calls", "it is used to create a sequential color map based", "( \"heatmap requires matplotlib 2.0.2 or greater \" \"please upgrade", "guaranteed to be strings even if the classes are a", "It is common practice to scale the data array ``X``", "instance according to its class in the discrete case or", "colorbar using uax property. show : bool, default: True If", "colorbar, bottom of scatter plot. This is the colorbar for", "This tells about the magnitude of each feature in the", "or just simply the indices of the data array. range_", "and modifying the aspect ratio. Does not modify the axes", "on. 
If None is passed in, the current axes will", "figure. proj_features : bool, default: False Boolean that indicates if", "the scikit-learn PCA transformer, which performs a dimensionality reduction on", "by the keyword arguments. \"\"\" super(PCA, self).finalize() self.ax.set_title(\"Principal Component Plot\")", "def __init__( self, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None,", "is common practice to scale the data array ``X`` before", "even if the classes are a different type. features_ :", "---------- ax : matplotlib Axes, default: None The axes to", "colorbar as specified by the keyword arguments. \"\"\" super(PCA, self).finalize()", "colorbar using uax property. kwargs : dict Keyword arguments that", "indices of the data array. range_ : (min y, max", "palettes from yellowbrick.features.projection import ProjectionVisualizer from yellowbrick.exceptions import YellowbrickValueError, NotFitted", "z_vector[i] * max_z], color=\"r\", ) self.ax.text( x_vector[i] * max_x *", "self.ax def finalize(self, **kwargs): \"\"\" Draws the title, labels, legends,", "from mpl_toolkits.axes_grid1 import make_axes_locatable from yellowbrick.style import palettes from yellowbrick.features.projection", "which draws the scatter plot. super(PCA, self).draw(Xp, y) if self.proj_features:", "maximum values in the target. Only available if the target", "efficient `randomized` solver is enabled. colorbar : bool, default: True", "= iris.target >>> pca_decomposition(X, y, colors=['r', 'g', 'b'], projection=3) \"\"\"", "colors=colors, colormap=colormap, projection=projection, alpha=alpha, colorbar=colorbar, **kwargs ) # Data Parameters", "or greater \" \"please upgrade matplotlib or set heatmap=False on", "instances as or a list of colors to color each", "Imports ########################################################################## # NOTE: must import mplot3d to load the", "the heatmap below scatter plot. 
\"\"\" if self._lax is None:", "practice to scale the data array ``X`` before applying a", "to specify the colors of the points. Returns ------- self.ax", "uax(self): \"\"\" The axes of the colorbar, bottom of scatter", "must be either 2 or 3\") return self.ax def finalize(self,", "by sorted class index. These names act as a label", ">>> pca_decomposition(X, y, colors=['r', 'g', 'b'], projection=3) \"\"\" # Instantiate", "No heatmap can be drawn with 3d plots as they", "decomposition in either 2D or 3D space as a scatter", "in the principal components. Also draws a colorbar for readability", "for readability purpose. The heatmap is accessible using lax property", "1]) for i in range(self.pca_components_.shape[1]): self.ax.plot( [0, x_vector[i] * max_x],", "default: 2 The number of axes to project into, either", "import NotFittedError ########################################################################## # 2D and 3D PCA Visualizer ##########################################################################", "axes will be used (or generated if required). features :", "features=features, classes=classes, colors=colors, colormap=colormap, projection=projection, alpha=alpha, colorbar=colorbar, **kwargs ) #", "default: None The names of the features specified by the", "specified by the columns of the input dataset. This length", "per #615 spec im = self.lax.imshow( self.pca_components_, interpolation=\"none\", cmap=self.colormap, aspect=\"auto\",", "self.pca_transformer.fit(X) self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_ return self def transform(self, X, y=None,", "a user passes feature names in, those features are used.", "= self.pca_transformer.named_steps[\"pca\"].components_ return self def transform(self, X, y=None, **kwargs): \"\"\"", "= None, None # No heatmap can be drawn with", "m features. y : ndarray or Series of length n", "``self.heatmap`` is ``False`` or ``None``. 
Parameters ---------- divider: AxesDivider An", "shape (n,), optional The target, used to specify the colors", "shape (n_features, n_components) This tells about the magnitude of each", "__init__( self, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None, colormap=None,", "before applying a PC decomposition. Variable scaling can be controlled", "y) >>> visualizer.show() \"\"\" def __init__( self, ax=None, features=None, classes=None,", "or DataFrame of shape n x m Returns a new", "ensures that a colorbar is drawn when target is #", "of the heatmap below scatter plot. \"\"\" if self._lax is", "feature names in, those features are used. Otherwise the columns", "self for use in Pipelines. \"\"\" # Call super fit", "in X. If a user passes feature names in, those", "names act as a label encoder for the legend, identifying", "super(PCA, self).layout(divider) if self.heatmap: # Axes for colorbar(for heatmap). if", "draw which draws the scatter plot. super(PCA, self).draw(Xp, y) if", "in the pricipal components. This is primarily used to draw", "the new axes for the colorbar and heatmap if divider", "3D PCA Visualizer ########################################################################## class PCA(ProjectionVisualizer): \"\"\" Produce a two", "[0, z_vector[i] * max_z], color=\"r\", ) self.ax.text( x_vector[i] * max_x", "values. Returns ------- self : visualizer Returns self for use", "########################################################################## def pca_decomposition( X, y=None, ax=None, features=None, classes=None, scale=True, projection=2,", "the colorbar and heatmap if divider is None: divider =", "project the features in the projected space. If True the", "May 23 18:34:27 2017 -0400 # # Copyright (C) 2017", "ratio. 
Does not modify the axes or the layout if", "parameter is only used in the discrete target type case", "self.pca_components_[2] max_z = max(Xp[:, 1]) for i in range(self.pca_components_.shape[1]): self.ax.plot(", "is discrete. This is guaranteed to be strings even if", "def transform(self, X, y=None, **kwargs): \"\"\" Calls the internal `transform`", "colormap : string or cmap, default: None The colormap used", "of the data array ``X`` projected onto its largest sequential", "points that represented the decomposition, `pca_features_`, of the original features,", "components to extract is lower than 80% of the smallest", ": (min y, max y) A tuple that describes the", "used to compute the number of colors needed for each", ") plt.colorbar( im, cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0, self.pca_components_.max()], ) return", "are passed to the base class and may influence the", "draw) visualizer.fit(X, y) visualizer.transform(X, y) if show: visualizer.show() else: visualizer.finalize()", "self.lax.imshow( self.pca_components_, interpolation=\"none\", cmap=self.colormap, aspect=\"auto\", ) plt.colorbar( im, cax=self.uax, orientation=\"horizontal\",", "1.05, y_vector[i] * max_y * 1.05, self.features_[i], color=\"r\", ) elif", "ordered colormap in the sequential case. If not enough colors", "i in range(self.pca_components_.shape[1]): self.ax.plot( [0, x_vector[i] * max_x], [0, y_vector[i]", "ndarray, shape (n_features,) The names of the features discovered or", "is used to compute the number of colors needed for", "\"\"\" # Call super fit to compute features, classes, colors,", ": bool, default: True Boolean that indicates if user wants", ": ndarray or DataFrame of shape n x m Returns", "# Call to super class ensures that a colorbar is", "Plots a scatterplot of points that represented the decomposition, `pca_features_`,", "random state on this solver. 
If the input X is", "None, None # No heatmap can be drawn with 3d", "# Returns the visualizer object. return visualizer # Alias for", "draw the biplots. classes_ : ndarray, shape (n_classes,) The class", "are used. Otherwise the columns of a DataFrame are used", "None is passed in, the current axes will be used", "self._uax is None: self._uax = divider.append_axes(\"bottom\", size=\"10%\", pad=0.7) # Axes", "to create a sequential color map based on the range", "the projection of features in the transformed space. Parameters ----------", "y_vector[i] * max_y * 1.05, self.features_[i], color=\"r\", ) elif self.projection", "or 3D space as a scatter plot. Parameters ---------- X", "the component. Parameters ---------- Xp : array-like of shape (n,", "of target or class values. Returns ------- Xp : ndarray", "number of unique values in y, otherwise an exception is", "scikit-yb developers # For license information, see LICENSE.txt # #", "features_ : ndarray, shape (n_features,) The names of the features", "divider = make_axes_locatable(self.ax) # Call to super class ensures that", "* 1.05, y_vector[i] * max_y * 1.05, self.features_[i], color=\"r\", )", "the input dataset. This length of this list must match", "projection : int or string, default: 2 The number of", "of the scatter plot. The colobar axes is accessible using", "components. Also draws a colorbar for readability purpose. The heatmap", "2.0.2 or greater \" \"please upgrade matplotlib or set heatmap=False", "from yellowbrick.features.projection import ProjectionVisualizer from yellowbrick.exceptions import YellowbrickValueError, NotFitted from", "target. alpha : float, default: 0.75 Specify a transparency where", "projection)``. 
\"\"\" try: Xp = self.pca_transformer.transform(X) self.draw(Xp, y) return Xp", "max_x, dy=y_vector[i] * max_y, color=\"r\", head_width=0.05, width=0.005, ) self.ax.text( x_vector[i]", "in range(self.pca_components_.shape[1]): self.ax.arrow( x=0, y=0, dx=x_vector[i] * max_x, dy=y_vector[i] *", "in the continuous case it is used to create a", "Visualizers. Attributes ---------- pca_components_ : ndarray, shape (n_features, n_components) This", "Tue May 23 18:34:27 2017 -0400 # # Copyright (C)", "3) The matrix produced by the ``transform()`` method. y :", "colorbar=colorbar, heatmap=heatmap, **kwargs ) # Fit and transform the visualizer", "colorbar\") return self._uax @property def lax(self): \"\"\" The axes of", "etc. super(PCA, self).fit(X=X, y=y, **kwargs) self.pca_transformer.fit(X) self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_ return", "or tuple, default: None A single color to plot all", "of shape ``(len(X), projection)``. Parameters ---------- X : ndarray or", "---------- Xp : array-like of shape (n, 2) or (n,", "self : visualizer Returns self for use in Pipelines. \"\"\"", "more efficient `randomized` solver is enabled. colorbar : bool, default:", "calls ``finalize()`` kwargs : dict Keyword arguments that are passed", "access or modify data in X. If a user passes", "the colorbar, bottom of scatter plot. This is the colorbar", "of the scikit-learn PCA transformer, which performs a dimensionality reduction", "default: None The class labels for each class in y,", "of shape n x m A matrix of n instances", "or set heatmap=False on the visualizer\" ) ) # Create", "the decomposition, `pca_features_`, of the original features, `X`, projected into", "identifying integer classes or renaming string labels. If omitted, the", "please ensure a 3d axes is passed to the visualizer,", "): \"\"\" Produce a two or three dimensional principal component", "$ \"\"\" Decomposition based feature visualization with PCA. 
\"\"\" ##########################################################################", "and colorbar using uax property. show : bool, default: True", "NOTE: must import mplot3d to load the 3D projection import", "X, y=None, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None, colormap=None,", "does not have an axes for heatmap\") return self._lax def", "individual colors. In the discrete case it is used to", "def pca_decomposition( X, y=None, ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False,", "classes=None, scale=True, projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False,", "was drawn on. \"\"\" x_vector = self.pca_components_[0] y_vector = self.pca_components_[1]", "self.projection == 3: self.ax.set_zlabel(\"$PC_3$\") if self.heatmap == True: self.lax.set_xticks(np.arange(-0.5, len(self.features_)))", "for colorbar and heatmap, adding new axes for the heatmap", "is raised. This parameter is only used in the discrete", "to the component. Parameters ---------- Xp : array-like of shape", "self.heatmap: # Axes for colorbar(for heatmap). if self._uax is None:", "max_x = max(Xp[:, 0]) max_y = max(Xp[:, 1]) if self.projection", "is passed in, the current axes will be used (or", "either 2 or 3 dimensions. If 2 dimensions are selected,", "to pcolormesh instead of imshow per #615 spec im =", "TODO: change to pcolormesh instead of imshow per #615 spec", "the pricipal components. This is primarily used to draw the", "\"\"\" Creates the layout for colorbar and heatmap, adding new", "plot. super(PCA, self).draw(Xp, y) if self.proj_features: # Draws projection features", "PC decomposition. Variable scaling can be controlled using the ``scale``", "current axes will be used (or generated if required). features", "Axes for colorbar(for heatmap). if self._uax is None: self._uax =", "that the scatter plot was drawn on. 
\"\"\" # Call", "axes of the heatmap below scatter plot. \"\"\" if self._lax", "raised. This parameter is only used in the discrete target", "visualizer does not have an axes for heatmap\") return self._lax", "scatter plot. \"\"\" if self._uax is None: raise AttributeError(\"This visualizer", "can be controlled using the ``scale`` argument. Parameters ---------- ax", "self.features_, rotation=90, fontsize=12, minor=True ) self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\",", "y): \"\"\" Plots a scatterplot of points that represented the", "will be similar to a biplot. colors : list or", "of colors to color each instance according to its class", "visualization with PCA. # # Author: <NAME> # Author: <NAME>", "object of transformed features of shape ``(len(X), projection)``. \"\"\" try:", "heatmap=heatmap, **kwargs ) # Fit and transform the visualizer (calls", "plot was drawn on. \"\"\" # Call to super draw", "\"\"\" Draw the projection of features in the transformed space.", "\"heatmap and colorbar are not compatible with 3d projections\" )", "projection)``. Parameters ---------- X : ndarray or DataFrame of shape", "a list of colors to color each instance according to", "of shape ``(len(X), projection)``. \"\"\" try: Xp = self.pca_transformer.transform(X) self.draw(Xp,", "of the input dataset. This length of this list must", "mplot3d to load the 3D projection import numpy as np", "colorbar and heatmap, adding new axes for the heatmap if", "array or series of target or class values. Returns -------", "the decomposition in either 2D or 3D space as a", "in, the current axes will be used (or generated if", "or 3 dimensions. If 2 dimensions are selected, a colorbar", "visualizer, otherwise one will be created using the current figure.", "values in y, otherwise an exception is raised. This parameter", "of the colorbar, bottom of scatter plot. 
This is the", "raise YellowbrickValueError( ( \"heatmap requires matplotlib 2.0.2 or greater \"", "if show: visualizer.show() else: visualizer.finalize() # Returns the visualizer object.", "3d. To plot 3d plots with matplotlib, please ensure a", "# Call to super draw which draws the scatter plot.", "nor ``clear_figure``. If False, simply calls ``finalize()`` kwargs : dict", "the target. alpha : float, default: 0.75 Specify a transparency", "one will be created using the current figure. proj_features :", "which performs a dimensionality reduction on the input features ``X``.", "projected into either 2 or 3 dimensions. If 2 dimensions", "in the discrete target type case and is ignored otherwise.", "the internal `transform` method of the scikit-learn PCA transformer, which", "Decomposition based feature visualization with PCA. \"\"\" ########################################################################## ## Imports", "= divider.append_axes(\"bottom\", size=\"10%\", pad=0.7) # Axes for heatmap if self._lax", "instead of imshow per #615 spec im = self.lax.imshow( self.pca_components_,", "colors, etc. super(PCA, self).fit(X=X, y=y, **kwargs) self.pca_transformer.fit(X) self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_", "# Draws projection features in transformed space. self._draw_projection_features(Xp, y) if", "omitted, the class labels will be taken from the unique", "max_z], color=\"r\", ) self.ax.text( x_vector[i] * max_x * 1.05, y_vector[i]", "DataFrame of shape n x m Returns a new array-like", "are selected, a colorbar and heatmap can also be optionally", "space. If True the plot will be similar to a", "from sklearn.exceptions import NotFittedError ########################################################################## # 2D and 3D PCA", "either 2D or 3D space as a scatter plot. Parameters", "renaming string labels. 
If omitted, the class labels will be", "max_z * 1.05, self.features_[i], color=\"r\", ) else: raise YellowbrickValueError(\"Projection dimensions", "visualizer.show() \"\"\" def __init__( self, ax=None, features=None, classes=None, scale=True, projection=2,", "drawn when target is # continuous. super(PCA, self).layout(divider) if self.heatmap:", "list, default: None The class labels for each class in", "optional (default None) This parameter sets the random state on", "for colorbar\") return self._uax @property def lax(self): \"\"\" The axes", "Otherwise the columns of a DataFrame are used or just", "the aspect ratio. Does not modify the axes or the", "or 3\") return self.ax def finalize(self, **kwargs): \"\"\" Draws the", "is continuous. Examples -------- >>> from sklearn import datasets >>>", "shape (n_features,) The names of the features discovered or used", "features in the transformed space. Parameters ---------- Xp : array-like", "points. Returns ------- self.ax : matplotlib Axes object Returns the", "class and in the continuous case it is used to", "= max(Xp[:, 0]) max_y = max(Xp[:, 1]) if self.projection ==", "def lax(self): \"\"\" The axes of the heatmap below scatter", "\"\"\" Produce a two or three dimensional principal component plot", "required). features : list, default: None The names of the", "plot. Parameters ---------- X : ndarray or DataFrame of shape", "mpl_toolkits.axes_grid1 import make_axes_locatable from yellowbrick.style import palettes from yellowbrick.features.projection import", "Pipelines. 
\"\"\" # Call super fit to compute features, classes,", "the length of this list must match the number of", "pad=0.7) # Axes for heatmap if self._lax is None: self._lax", "or three dimensional principal component plot of a data array", "and 3D PCA Visualizer ########################################################################## class PCA(ProjectionVisualizer): \"\"\" Produce a", "# Visual Parameters self.heatmap = heatmap self._uax, self._lax = None,", "length n An array or series of target or class", "colorbar is drawn when target is # continuous. super(PCA, self).layout(divider)", "fontsize=10) self.fig.tight_layout() ########################################################################## ## Quick Method ########################################################################## def pca_decomposition( X,", "will be created using the current figure. proj_features : bool,", "the PCA transformer, transforms the data in X, then draws", "x m Returns a new array-like object of transformed features", "------- self.ax : matplotlib Axes object Returns the axes that", "features in the projected space. If True the plot will", "minimum and maximum values in the target. Only available if", ": ndarray or Series of length n An array or", "self._uax = divider.append_axes(\"bottom\", size=\"10%\", pad=0.7) # Axes for heatmap if", "if self._lax is None: raise AttributeError(\"This visualizer does not have", "Add a heatmap showing contribution of each feature in the", "controlled using the ``scale`` argument. Parameters ---------- X : ndarray", "Pipeline from sklearn.decomposition import PCA as PCATransformer from sklearn.preprocessing import", "biplots. classes_ : ndarray, shape (n_classes,) The class labels that", "larger than 500x500 and the number of components to extract", "per class are specified then the colors are treated as", "\"transform\") def draw(self, Xp, y): \"\"\" Plots a scatterplot of", "an index to access or modify data in X. 
If", "<NAME> # Author: <NAME> # Author: <NAME> # Created: Tue", "(or generated if required). features : list, default: None The", "compute the number of colors needed for each class and", "1 is completely opaque and 0 is completely transparent. This", "axes # division. if self.projection == 3 and self.heatmap: raise", "as specified by the keyword arguments. \"\"\" super(PCA, self).finalize() self.ax.set_title(\"Principal", "this list must match the number of unique values in", "used to create the individual colors. In the discrete case", "number of axes to project into, either 2d or 3d.", "each class in y, ordered by sorted class index. These", "or the layout if ``self.heatmap`` is ``False`` or ``None``. Parameters", "scale self.proj_features = proj_features # Create the PCA transformer self.pca_transformer", "PCA. # # Author: <NAME> # Author: <NAME> # Author:", "transparency where 1 is completely opaque and 0 is completely", "be optionally included to show the magnitude of each feature", "# For license information, see LICENSE.txt # # ID: pca.py", "y : ndarray or Series of length n An array", "needed for each class and in the continuous case it", "and colorbar as specified by the keyword arguments. \"\"\" super(PCA,", "performs a dimensionality reduction on the input features ``X``. Next", "colorbar=True, heatmap=False, show=True, **kwargs ): \"\"\" Produce a two or", "heatmap, and colorbar as specified by the keyword arguments. 
\"\"\"", "return self._uax @property def lax(self): \"\"\" The axes of the", "y) if show: visualizer.show() else: visualizer.finalize() # Returns the visualizer", "Specify a transparency where 1 is completely opaque and 0", "NotFittedError: raise NotFitted.from_estimator(self, \"transform\") def draw(self, Xp, y): \"\"\" Plots", "True If True, calls ``show()``, which in turn calls ``plt.show()``", "make_axes_locatable from yellowbrick.style import palettes from yellowbrick.features.projection import ProjectionVisualizer from", "ax=ax, features=features, classes=classes, colors=colors, colormap=colormap, projection=projection, alpha=alpha, colorbar=colorbar, **kwargs )", "axes to plot the figure on. If None is passed", "the input features ``X``. Next calls the ``draw`` method of", "space. self._draw_projection_features(Xp, y) if self.projection == 2: if self.heatmap: if", "For license information, see LICENSE.txt # # ID: pca.py []", "based feature visualization with PCA. \"\"\" ########################################################################## ## Imports ##########################################################################", "case or as an ordered colormap in the sequential case.", "can be controlled using the ``scale`` argument. Parameters ---------- X", "in either 2D or 3D space as a scatter plot.", "used to create a sequential color map based on the", "class values. Returns ------- self : visualizer Returns self for", "accessible using lax property and colorbar using uax property. show", "y : array-like of shape (n,), optional The target, used", "names of the features specified by the columns of the", "sklearn import datasets >>> iris = datasets.load_iris() >>> X =", "or as an ordered colormap in the sequential case. If", "Author: <NAME> # Created: Tue May 23 18:34:27 2017 -0400", "<NAME> # Author: <NAME> # Created: Tue May 23 18:34:27", "discovered or used in the visualizer that can be used", "largest sequential principal components. 
It is common practice to scale", "produced by the ``transform()`` method. y : array-like of shape", "cmap, default: None The colormap used to create the individual", "the smallest dimension of the data, then the more efficient", "to be passed among all layout calls. \"\"\" # Ensure", "for heatmap and not for the scatter plot. \"\"\" if", "Returns ------- self : visualizer Returns self for use in", "None # No heatmap can be drawn with 3d plots", ") # Fit and transform the visualizer (calls draw) visualizer.fit(X,", "act as a label encoder for the legend, identifying integer", "\"\"\" if self._lax is None: raise AttributeError(\"This visualizer does not", "visualizer Returns self for use in Pipelines. \"\"\" # Call", "\"\"\" x_vector = self.pca_components_[0] y_vector = self.pca_components_[1] max_x = max(Xp[:,", "to plot all instances as or a list of colors", "axes for the colorbar and heatmap if divider is None:", "or ``None``. Parameters ---------- divider: AxesDivider An AxesDivider to be", "features. y : ndarray or Series of length n An", "if not self.colormap: self.colormap = palettes.DEFAULT_SEQUENCE # TODO: change to", "of unique values in y, otherwise an exception is raised.", "a sequential color map based on the range of the", "than 80% of the smallest dimension of the data, then", "to scale the data array ``X`` before applying a PC", "cycle. colormap : string or cmap, default: None The colormap", "This property makes densely clustered points more visible. random_state :", "divider is None: divider = make_axes_locatable(self.ax) # Call to super", "``(len(X), projection)``. 
\"\"\" try: Xp = self.pca_transformer.transform(X) self.draw(Xp, y) return", "colors=None, colormap=None, alpha=0.75, random_state=None, colorbar=True, heatmap=False, show=True, **kwargs ): \"\"\"", "the visualizer that can be used as an index to", "self).finalize() self.ax.set_title(\"Principal Component Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if self.projection == 3:", "those features are used. Otherwise the columns of a DataFrame", "the number of unique values in y, otherwise an exception", "represented the decomposition, `pca_features_`, of the original features, `X`, projected", "None: self._uax = divider.append_axes(\"bottom\", size=\"10%\", pad=0.7) # Axes for heatmap", "the ``scale`` argument. Parameters ---------- ax : matplotlib Axes, default:", "self).draw(Xp, y) if self.proj_features: # Draws projection features in transformed", "call ``plt.savefig`` from this signature, nor ``clear_figure``. If False, simply", "to compute the number of colors needed for each class", "* max_y * 1.05, self.features_[i], color=\"r\", ) elif self.projection ==", "``fit()``. classes : list, default: None The class labels for", "dimensionality reduction on the input features ``X``. Next calls the", "case it is used to create a sequential color map", "class and may influence the visualization as defined in other", "the ``scale`` argument. Parameters ---------- X : ndarray or DataFrame", "\"$PC_2$\"], va=\"bottom\", fontsize=10) self.fig.tight_layout() ########################################################################## ## Quick Method ########################################################################## def", "Parameters ---------- ax : matplotlib Axes, default: None The axes", "Parameters ---------- divider: AxesDivider An AxesDivider to be passed among", "the keyword arguments. 
\"\"\" super(PCA, self).finalize() self.ax.set_title(\"Principal Component Plot\") self.ax.set_xlabel(\"$PC_1$\")", "y, max y) A tuple that describes the minimum and", "transformed features of shape ``(len(X), projection)``. Parameters ---------- X :", "do not have permit axes # division. if self.projection ==", "scikit-learn PCA transformer, which performs a dimensionality reduction on the", "then draws the decomposition in either 2D or 3D space", "The colobar axes is accessible using the cax property. heatmap", "Component Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if self.projection == 3: self.ax.set_zlabel(\"$PC_3$\") if", "super(PCA, self).fit(X=X, y=y, **kwargs) self.pca_transformer.fit(X) self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_ return self", "colors : list or tuple, default: None A single color", "to load the 3D projection import numpy as np import", "orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0, self.pca_components_.max()], ) return self.ax def _draw_projection_features(self, Xp,", "``None``. Parameters ---------- divider: AxesDivider An AxesDivider to be passed", "lower than 80% of the smallest dimension of the data,", "ax=None, features=None, classes=None, scale=True, projection=2, proj_features=False, colors=None, colormap=None, alpha=0.75, random_state=None,", "case it is used to compute the number of colors", "have permit axes # division. if self.projection == 3 and", "and heatmap, adding new axes for the heatmap if necessary", "the columns of the input dataset. 
This length of this", "layout(self, divider=None): \"\"\" Creates the layout for colorbar and heatmap,", "the layout for colorbar and heatmap, adding new axes for", "not enough colors per class are specified then the colors", "matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import make_axes_locatable from yellowbrick.style import", "a 3d axes is passed to the visualizer, otherwise one", "integer classes or renaming string labels. If omitted, the class", "list of colors to color each instance according to its", "data array ``X`` before applying a PC decomposition. Variable scaling", "pca_components_ : ndarray, shape (n_features, n_components) This tells about the", "return self.ax def _draw_projection_features(self, Xp, y): \"\"\" Draw the projection", "plots as they do not have permit axes # division.", "the data array ``X`` projected onto its largest sequential principal", "of length n An array or series of target or", "of imshow per #615 spec im = self.lax.imshow( self.pca_components_, interpolation=\"none\",", "3 and self.heatmap: raise YellowbrickValueError( \"heatmap and colorbar are not", "a data array projected onto its largest sequential principal components.", "target_type is \"continous\" draw a colorbar to the right of", "sequential case. If not enough colors per class are specified", ": list, default: None The class labels for each class", "in the discrete case or as an ordered colormap in", "y=y, **kwargs) self.pca_transformer.fit(X) self.pca_components_ = self.pca_transformer.named_steps[\"pca\"].components_ return self def transform(self,", "the plot will be similar to a biplot. 
colors :", "colors=['r', 'g', 'b'], projection=3) \"\"\" # Instantiate the visualizer visualizer", "`pca_features_`, of the original features, `X`, projected into either 2", "sklearn.pipeline import Pipeline from sklearn.decomposition import PCA as PCATransformer from", "PCA(ProjectionVisualizer): \"\"\" Produce a two or three dimensional principal component", "have an axes for heatmap\") return self._lax def layout(self, divider=None):", "using the current figure. proj_features : bool, default: False Boolean", "not have an axes for colorbar\") return self._uax @property def", "self._lax = divider.append_axes(\"bottom\", size=\"15%\", pad=0.5) def fit(self, X, y=None, **kwargs):", "None The class labels for each class in y, ordered", "the title, labels, legends, heatmap, and colorbar as specified by", "the number of components to extract is lower than 80%", "fontsize=12, minor=True ) self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\", \"$PC_2$\"], va=\"bottom\", fontsize=10) self.fig.tight_layout()", "as PCATransformer from sklearn.preprocessing import StandardScaler from sklearn.exceptions import NotFittedError", "input X is larger than 500x500 and the number of", "exception will be raised on ``fit()``. classes : list, default:", "be raised on ``fit()``. classes : list, default: None The", "StandardScaler(with_std=self.scale)), (\"pca\", PCATransformer(self.projection, random_state=random_state)), ] ) self.alpha = alpha #", "property. show : bool, default: True If True, calls ``show()``,", "a colorbar to the right of the scatter plot. The", "self.ax.set_title(\"Principal Component Plot\") self.ax.set_xlabel(\"$PC_1$\") self.ax.set_ylabel(\"$PC_2$\") if self.projection == 3: self.ax.set_zlabel(\"$PC_3$\")", "0, self.pca_components_.max()], ) return self.ax def _draw_projection_features(self, Xp, y): \"\"\"", "if self._lax is None: self._lax = divider.append_axes(\"bottom\", size=\"15%\", pad=0.5) def", "argument. 
Parameters ---------- X : ndarray or DataFrame of shape", "passed to the visualizer, otherwise one will be created using", "index. These names act as a label encoder for the", "principal components. It is common practice to scale the data", "dx=x_vector[i] * max_x, dy=y_vector[i] * max_y, color=\"r\", head_width=0.05, width=0.005, )", "plt from mpl_toolkits.axes_grid1 import make_axes_locatable from yellowbrick.style import palettes from", "to compute features, classes, colors, etc. super(PCA, self).fit(X=X, y=y, **kwargs)", "solver is enabled. colorbar : bool, default: True If the", "Also draws a colorbar for readability purpose. The heatmap is", "is accessible using lax property and colorbar using uax property.", "heatmap). if self._uax is None: self._uax = divider.append_axes(\"bottom\", size=\"10%\", pad=0.7)", "max_x], [0, y_vector[i] * max_y], [0, z_vector[i] * max_z], color=\"r\",", "The axes to plot the figure on. If None is", "cax=self.uax, orientation=\"horizontal\", ticks=[self.pca_components_.min(), 0, self.pca_components_.max()], ) return self.ax def _draw_projection_features(self,", "simply calls ``finalize()`` kwargs : dict Keyword arguments that are", "2D or 3D space as a scatter plot. Parameters ----------", "An array or series of target or class values. ax", "label encoder for the legend, identifying integer classes or renaming", "or string, default: 2 The number of axes to project", "import mplot3d to load the 3D projection import numpy as", "is used to create a sequential color map based on", "plots with matplotlib, please ensure a 3d axes is passed", "scatter plot. Parameters ---------- X : ndarray or DataFrame of", "In the discrete case it is used to compute the", "plot. The colobar axes is accessible using the cax property.", "If True, calls ``show()``, which in turn calls ``plt.show()`` however", "3d axes is passed to the visualizer, otherwise one will", "sequential principal components. 
It is common practice to scale the", "self.pca_transformer.named_steps[\"pca\"].components_ return self def transform(self, X, y=None, **kwargs): \"\"\" Calls", "is completely opaque and 0 is completely transparent. This property", "the user wants to project the features in the projected", "color each instance according to its class in the discrete", "minor=True) self.lax.set_xticklabels( self.features_, rotation=90, fontsize=12, minor=True ) self.lax.set_yticks(np.arange(0.5, 2)) self.lax.set_yticklabels([\"$PC_1$\",", "of the data, then the more efficient `randomized` solver is", "array-like object of transformed features of shape ``(len(X), projection)``. \"\"\"", "or renaming string labels. If omitted, the class labels will", "transparent. This property makes densely clustered points more visible. random_state", "self.projection == 2: for i in range(self.pca_components_.shape[1]): self.ax.arrow( x=0, y=0,", "extract is lower than 80% of the smallest dimension of", "import make_axes_locatable from yellowbrick.style import palettes from yellowbrick.features.projection import ProjectionVisualizer", "used in the discrete target type case and is ignored", "self._uax is None: raise AttributeError(\"This visualizer does not have an", "X : ndarray or DataFrame of shape n x m", "pcolormesh instead of imshow per #615 spec im = self.lax.imshow(", "type is discrete. This is guaranteed to be strings even", "# NOTE: must import mplot3d to load the 3D projection", "self.lax.set_xticks(np.arange(0, len(self.features_)), minor=True) self.lax.set_xticklabels( self.features_, rotation=90, fontsize=12, minor=True ) self.lax.set_yticks(np.arange(0.5,", "bottom of scatter plot. This is the colorbar for heatmap", "X, y=None, **kwargs): \"\"\" Fits the PCA transformer, transforms the", "the visualization as defined in other Visualizers. Attributes ---------- pca_components_", "for colorbar(for heatmap). 
if self._uax is None: self._uax = divider.append_axes(\"bottom\",", "class in y, ordered by sorted class index. These names", "* 1.05, z_vector[i] * max_z * 1.05, self.features_[i], color=\"r\", )", "yellowbrick.features.projection import ProjectionVisualizer from yellowbrick.exceptions import YellowbrickValueError, NotFitted from sklearn.pipeline", "sklearn.preprocessing import StandardScaler from sklearn.exceptions import NotFittedError ########################################################################## # 2D", "z_vector = self.pca_components_[2] max_z = max(Xp[:, 1]) for i in", "raised on ``fit()``. classes : list, default: None The class", "will be used (or generated if required). features : list,", "included to show the magnitude of each feature value to", "target type is continuous. Examples -------- >>> from sklearn import", "super draw which draws the scatter plot. super(PCA, self).draw(Xp, y)", "y=None, **kwargs): \"\"\" Calls the internal `transform` method of the", ") else: raise YellowbrickValueError(\"Projection dimensions must be either 2 or", "* max_y], [0, z_vector[i] * max_z], color=\"r\", ) self.ax.text( x_vector[i]", "drawn on. \"\"\" # Call to super draw which draws", "can be drawn with 3d plots as they do not", "of each feature in the pricipal components. This is primarily", "PCA Visualizer ########################################################################## class PCA(ProjectionVisualizer): \"\"\" Produce a two or", "permit axes # division. if self.projection == 3 and self.heatmap:", "self.lax.set_xticks(np.arange(-0.5, len(self.features_))) self.lax.set_xticklabels([]) # Makes the labels centered. self.lax.set_xticks(np.arange(0, len(self.features_)),", "class index. These names act as a label encoder for", "draws a colorbar for readability purpose. 
The heatmap is accessible", "* max_x * 1.05, y_vector[i] * max_y * 1.05, z_vector[i]", "draws the decomposition in either 2D or 3D space as", "if ``self.heatmap`` is ``False`` or ``None``. Parameters ---------- divider: AxesDivider", "Create the new axes for the colorbar and heatmap if", "[] <EMAIL> $ \"\"\" Decomposition based feature visualization with PCA.", "if self.projection == 2: if self.heatmap: if not self.colormap: self.colormap", "of the Yellowbrick visualizer, finally returning a new array of", "series of target or class values. Returns ------- Xp :", "self.ax.text( x_vector[i] * max_x * 1.05, y_vector[i] * max_y *", "3d plots with matplotlib, please ensure a 3d axes is", "2: if self.heatmap: if not self.colormap: self.colormap = palettes.DEFAULT_SEQUENCE #", "sklearn.exceptions import NotFittedError ########################################################################## # 2D and 3D PCA Visualizer", "a new array-like object of transformed features of shape ``(len(X),", "ndarray or Series of length n An array or series", "va=\"bottom\", fontsize=10) self.fig.tight_layout() ########################################################################## ## Quick Method ########################################################################## def pca_decomposition(", "Method ########################################################################## def pca_decomposition( X, y=None, ax=None, features=None, classes=None, scale=True,", "colors to color each instance according to its class in", "max_x * 1.05, y_vector[i] * max_y * 1.05, z_vector[i] *", "to plot the figure on. If None is passed in,", "component plot of a data array projected onto its largest", "``clear_figure``. 
If False, simply calls ``finalize()`` kwargs : dict Keyword", "None, optional (default None) This parameter sets the random state", "each instance according to its class in the discrete case", "Series of length n An array or series of target", "for the legend, identifying integer classes or renaming string labels.", "from yellowbrick.exceptions import YellowbrickValueError, NotFitted from sklearn.pipeline import Pipeline from", "in y, otherwise an exception is raised. This parameter is", "showing contribution of each feature in the principal components. Also", "max(Xp[:, 0]) max_y = max(Xp[:, 1]) if self.projection == 2:", "other Visualizers. Attributes ---------- pca_components_ : ndarray, shape (n_features, n_components)", "self.draw(Xp, y) return Xp except NotFittedError: raise NotFitted.from_estimator(self, \"transform\") def", "else: visualizer.finalize() # Returns the visualizer object. return visualizer #", "original features, `X`, projected into either 2 or 3 dimensions.", "* max_z * 1.05, self.features_[i], color=\"r\", ) else: raise YellowbrickValueError(\"Projection", "be controlled using the ``scale`` argument. Parameters ---------- X :", "based feature visualization with PCA. # # Author: <NAME> #", "= self.pca_components_[0] y_vector = self.pca_components_[1] max_x = max(Xp[:, 0]) max_y", "super(PCA, self).draw(Xp, y) if self.proj_features: # Draws projection features in", "and heatmap can also be optionally included to show the", "component. Parameters ---------- Xp : array-like of shape (n, 2)", "The names of the features discovered or used in the", "draws the scatter plot. super(PCA, self).draw(Xp, y) if self.proj_features: #", "was drawn on. \"\"\" # Call to super draw which", "using uax property. 
show : bool, default: True If True,", "import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import make_axes_locatable from yellowbrick.style", "# TODO: change to pcolormesh instead of imshow per #615", "base class and may influence the visualization as defined in", "dict Keyword arguments that are passed to the base class", "decomposition. Variable scaling can be controlled using the ``scale`` argument.", "if divider is None: divider = make_axes_locatable(self.ax) # Call to", "3 dimensions. If 2 dimensions are selected, a colorbar and", "of this list must match the number of columns in", "and colorbar are not compatible with 3d projections\" ) @property", "the classes are a different type. features_ : ndarray, shape", "classes : list, default: None The class labels for each", "be drawn with 3d plots as they do not have", "magnitude of each feature in the pricipal components. This is", "and may influence the visualization as defined in other Visualizers.", "to scale data. projection : int or string, default: 2", "self._lax is None: self._lax = divider.append_axes(\"bottom\", size=\"15%\", pad=0.5) def fit(self,", "# 2D and 3D PCA Visualizer ########################################################################## class PCA(ProjectionVisualizer): \"\"\"" ]
[ "bool: return _is_arc_sorted(fsa.get_base()) def has_self_loops(fsa: Fsa) -> bool: return _has_self_loops(fsa.get_base())", "def is_top_sorted(fsa: Fsa) -> bool: return _is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa: Fsa)", "_k2host import _is_acyclic from _k2host import _is_deterministic from _k2host import", "def is_connected(fsa: Fsa) -> bool: return _is_connected(fsa.get_base()) def is_empty(fsa: Fsa)", "_has_self_loops from _k2host import _is_acyclic from _k2host import _is_deterministic from", "bool: return _is_connected(fsa.get_base()) def is_empty(fsa: Fsa) -> bool: return _is_empty(fsa.get_base())", "_k2host import _is_valid from _k2host import _is_top_sorted from _k2host import", "from _k2host import _is_deterministic from _k2host import _is_epsilon_free from _k2host", "-> bool: return _is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa: Fsa) -> bool: return", "Corporation (author: <NAME>) # See ../../../LICENSE for clarification regarding multiple", "Fsa) -> bool: return _is_epsilon_free(fsa.get_base()) def is_connected(fsa: Fsa) -> bool:", "_k2host import _is_empty def is_valid(fsa: Fsa) -> bool: return _is_valid(fsa.get_base())", "return _is_valid(fsa.get_base()) def is_top_sorted(fsa: Fsa) -> bool: return _is_top_sorted(fsa.get_base()) def", "clarification regarding multiple authors import torch from torch.utils.dlpack import to_dlpack", "import _is_arc_sorted from _k2host import _has_self_loops from _k2host import _is_acyclic", "for clarification regarding multiple authors import torch from torch.utils.dlpack import", "# See ../../../LICENSE for clarification regarding multiple authors import torch", "<reponame>Jarvan-Wang/k2<filename>k2/python/host/k2host/properties.py # Copyright (c) 2020 Xiaomi Corporation (author: <NAME>) #", "from _k2host import _is_epsilon_free from _k2host import _is_connected from _k2host", "import _is_acyclic from _k2host import _is_deterministic from _k2host import _is_epsilon_free", "bool: return 
_is_deterministic(fsa.get_base()) def is_epsilon_free(fsa: Fsa) -> bool: return _is_epsilon_free(fsa.get_base())", "Copyright (c) 2020 Xiaomi Corporation (author: <NAME>) # See ../../../LICENSE", "_is_top_sorted from _k2host import _is_arc_sorted from _k2host import _has_self_loops from", "Fsa) -> bool: return _is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa: Fsa) -> bool:", "import _is_deterministic from _k2host import _is_epsilon_free from _k2host import _is_connected", "_k2host import _is_deterministic from _k2host import _is_epsilon_free from _k2host import", "_is_epsilon_free(fsa.get_base()) def is_connected(fsa: Fsa) -> bool: return _is_connected(fsa.get_base()) def is_empty(fsa:", "from _k2host import _is_top_sorted from _k2host import _is_arc_sorted from _k2host", "regarding multiple authors import torch from torch.utils.dlpack import to_dlpack from", "_is_deterministic from _k2host import _is_epsilon_free from _k2host import _is_connected from", "_is_arc_sorted from _k2host import _has_self_loops from _k2host import _is_acyclic from", "_is_acyclic from _k2host import _is_deterministic from _k2host import _is_epsilon_free from", "Fsa) -> bool: return _is_connected(fsa.get_base()) def is_empty(fsa: Fsa) -> bool:", "torch from torch.utils.dlpack import to_dlpack from .fsa import Fsa from", "-> bool: return _is_deterministic(fsa.get_base()) def is_epsilon_free(fsa: Fsa) -> bool: return", "-> bool: return _is_epsilon_free(fsa.get_base()) def is_connected(fsa: Fsa) -> bool: return", "is_top_sorted(fsa: Fsa) -> bool: return _is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa: Fsa) ->", "return _has_self_loops(fsa.get_base()) def is_acyclic(fsa: Fsa) -> bool: return _is_acyclic(fsa.get_base()) def", "is_epsilon_free(fsa: Fsa) -> bool: return _is_epsilon_free(fsa.get_base()) def is_connected(fsa: Fsa) ->", "-> bool: return _has_self_loops(fsa.get_base()) def is_acyclic(fsa: Fsa) -> bool: return", "_is_epsilon_free from _k2host import _is_connected from _k2host 
import _is_empty def", "-> bool: return _is_valid(fsa.get_base()) def is_top_sorted(fsa: Fsa) -> bool: return", "from _k2host import _has_self_loops from _k2host import _is_acyclic from _k2host", "bool: return _has_self_loops(fsa.get_base()) def is_acyclic(fsa: Fsa) -> bool: return _is_acyclic(fsa.get_base())", "from .fsa import Fsa from _k2host import _is_valid from _k2host", "_has_self_loops(fsa.get_base()) def is_acyclic(fsa: Fsa) -> bool: return _is_acyclic(fsa.get_base()) def is_deterministic(fsa:", "_is_arc_sorted(fsa.get_base()) def has_self_loops(fsa: Fsa) -> bool: return _has_self_loops(fsa.get_base()) def is_acyclic(fsa:", "_is_connected from _k2host import _is_empty def is_valid(fsa: Fsa) -> bool:", ".fsa import Fsa from _k2host import _is_valid from _k2host import", "-> bool: return _is_connected(fsa.get_base()) def is_empty(fsa: Fsa) -> bool: return", "def is_epsilon_free(fsa: Fsa) -> bool: return _is_epsilon_free(fsa.get_base()) def is_connected(fsa: Fsa)", "torch.utils.dlpack import to_dlpack from .fsa import Fsa from _k2host import", "Fsa) -> bool: return _is_arc_sorted(fsa.get_base()) def has_self_loops(fsa: Fsa) -> bool:", "_is_acyclic(fsa.get_base()) def is_deterministic(fsa: Fsa) -> bool: return _is_deterministic(fsa.get_base()) def is_epsilon_free(fsa:", "Fsa) -> bool: return _has_self_loops(fsa.get_base()) def is_acyclic(fsa: Fsa) -> bool:", "is_valid(fsa: Fsa) -> bool: return _is_valid(fsa.get_base()) def is_top_sorted(fsa: Fsa) ->", "import _is_top_sorted from _k2host import _is_arc_sorted from _k2host import _has_self_loops", "return _is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa: Fsa) -> bool: return _is_arc_sorted(fsa.get_base()) def", "def is_deterministic(fsa: Fsa) -> bool: return _is_deterministic(fsa.get_base()) def is_epsilon_free(fsa: Fsa)", "return _is_epsilon_free(fsa.get_base()) def is_connected(fsa: Fsa) -> bool: return _is_connected(fsa.get_base()) def", "from _k2host import _is_empty def is_valid(fsa: Fsa) -> bool: 
return", "return _is_arc_sorted(fsa.get_base()) def has_self_loops(fsa: Fsa) -> bool: return _has_self_loops(fsa.get_base()) def", "import _is_epsilon_free from _k2host import _is_connected from _k2host import _is_empty", "_k2host import _is_epsilon_free from _k2host import _is_connected from _k2host import", "def is_arc_sorted(fsa: Fsa) -> bool: return _is_arc_sorted(fsa.get_base()) def has_self_loops(fsa: Fsa)", "authors import torch from torch.utils.dlpack import to_dlpack from .fsa import", "<NAME>) # See ../../../LICENSE for clarification regarding multiple authors import", "multiple authors import torch from torch.utils.dlpack import to_dlpack from .fsa", "has_self_loops(fsa: Fsa) -> bool: return _has_self_loops(fsa.get_base()) def is_acyclic(fsa: Fsa) ->", "return _is_acyclic(fsa.get_base()) def is_deterministic(fsa: Fsa) -> bool: return _is_deterministic(fsa.get_base()) def", "_k2host import _has_self_loops from _k2host import _is_acyclic from _k2host import", "_is_empty def is_valid(fsa: Fsa) -> bool: return _is_valid(fsa.get_base()) def is_top_sorted(fsa:", "def is_valid(fsa: Fsa) -> bool: return _is_valid(fsa.get_base()) def is_top_sorted(fsa: Fsa)", "-> bool: return _is_arc_sorted(fsa.get_base()) def has_self_loops(fsa: Fsa) -> bool: return", "def has_self_loops(fsa: Fsa) -> bool: return _has_self_loops(fsa.get_base()) def is_acyclic(fsa: Fsa)", "(c) 2020 Xiaomi Corporation (author: <NAME>) # See ../../../LICENSE for", "def is_acyclic(fsa: Fsa) -> bool: return _is_acyclic(fsa.get_base()) def is_deterministic(fsa: Fsa)", "from torch.utils.dlpack import to_dlpack from .fsa import Fsa from _k2host", "(author: <NAME>) # See ../../../LICENSE for clarification regarding multiple authors", "is_acyclic(fsa: Fsa) -> bool: return _is_acyclic(fsa.get_base()) def is_deterministic(fsa: Fsa) ->", "import _is_valid from _k2host import _is_top_sorted from _k2host import _is_arc_sorted", "bool: return _is_epsilon_free(fsa.get_base()) def is_connected(fsa: Fsa) -> bool: 
return _is_connected(fsa.get_base())", "../../../LICENSE for clarification regarding multiple authors import torch from torch.utils.dlpack", "_k2host import _is_connected from _k2host import _is_empty def is_valid(fsa: Fsa)", "import _is_connected from _k2host import _is_empty def is_valid(fsa: Fsa) ->", "bool: return _is_acyclic(fsa.get_base()) def is_deterministic(fsa: Fsa) -> bool: return _is_deterministic(fsa.get_base())", "2020 Xiaomi Corporation (author: <NAME>) # See ../../../LICENSE for clarification", "_is_valid from _k2host import _is_top_sorted from _k2host import _is_arc_sorted from", "_k2host import _is_top_sorted from _k2host import _is_arc_sorted from _k2host import", "from _k2host import _is_connected from _k2host import _is_empty def is_valid(fsa:", "See ../../../LICENSE for clarification regarding multiple authors import torch from", "bool: return _is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa: Fsa) -> bool: return _is_arc_sorted(fsa.get_base())", "import _has_self_loops from _k2host import _is_acyclic from _k2host import _is_deterministic", "_is_deterministic(fsa.get_base()) def is_epsilon_free(fsa: Fsa) -> bool: return _is_epsilon_free(fsa.get_base()) def is_connected(fsa:", "from _k2host import _is_valid from _k2host import _is_top_sorted from _k2host", "is_arc_sorted(fsa: Fsa) -> bool: return _is_arc_sorted(fsa.get_base()) def has_self_loops(fsa: Fsa) ->", "return _is_deterministic(fsa.get_base()) def is_epsilon_free(fsa: Fsa) -> bool: return _is_epsilon_free(fsa.get_base()) def", "_is_valid(fsa.get_base()) def is_top_sorted(fsa: Fsa) -> bool: return _is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa:", "import Fsa from _k2host import _is_valid from _k2host import _is_top_sorted", "-> bool: return _is_acyclic(fsa.get_base()) def is_deterministic(fsa: Fsa) -> bool: return", "_k2host import _is_arc_sorted from _k2host import _has_self_loops from _k2host import", "Xiaomi Corporation (author: <NAME>) # See ../../../LICENSE for 
clarification regarding", "from _k2host import _is_acyclic from _k2host import _is_deterministic from _k2host", "Fsa) -> bool: return _is_valid(fsa.get_base()) def is_top_sorted(fsa: Fsa) -> bool:", "# Copyright (c) 2020 Xiaomi Corporation (author: <NAME>) # See", "from _k2host import _is_arc_sorted from _k2host import _has_self_loops from _k2host", "bool: return _is_valid(fsa.get_base()) def is_top_sorted(fsa: Fsa) -> bool: return _is_top_sorted(fsa.get_base())", "_is_top_sorted(fsa.get_base()) def is_arc_sorted(fsa: Fsa) -> bool: return _is_arc_sorted(fsa.get_base()) def has_self_loops(fsa:", "Fsa from _k2host import _is_valid from _k2host import _is_top_sorted from", "is_deterministic(fsa: Fsa) -> bool: return _is_deterministic(fsa.get_base()) def is_epsilon_free(fsa: Fsa) ->", "is_connected(fsa: Fsa) -> bool: return _is_connected(fsa.get_base()) def is_empty(fsa: Fsa) ->", "to_dlpack from .fsa import Fsa from _k2host import _is_valid from", "import _is_empty def is_valid(fsa: Fsa) -> bool: return _is_valid(fsa.get_base()) def", "import to_dlpack from .fsa import Fsa from _k2host import _is_valid", "import torch from torch.utils.dlpack import to_dlpack from .fsa import Fsa", "Fsa) -> bool: return _is_acyclic(fsa.get_base()) def is_deterministic(fsa: Fsa) -> bool:", "Fsa) -> bool: return _is_deterministic(fsa.get_base()) def is_epsilon_free(fsa: Fsa) -> bool:" ]
[ "text = Field() voter_count = Field() def __init__(self, text: str,", "PollOption(TelegramStructure): text = Field() voter_count = Field() def __init__(self, text:", "Field() voter_count = Field() def __init__(self, text: str, voter_count: int", "int ): self.text = \\ Field(text, [str]) self.voter_count = \\", "): self.text = \\ Field(text, [str]) self.voter_count = \\ Field(voter_count,", "__init__(self, text: str, voter_count: int ): self.text = \\ Field(text,", "= Field() voter_count = Field() def __init__(self, text: str, voter_count:", "def __init__(self, text: str, voter_count: int ): self.text = \\", "import TelegramStructure, Field class PollOption(TelegramStructure): text = Field() voter_count =", "= Field() def __init__(self, text: str, voter_count: int ): self.text", "self.text = \\ Field(text, [str]) self.voter_count = \\ Field(voter_count, [int])", "text: str, voter_count: int ): self.text = \\ Field(text, [str])", "str, voter_count: int ): self.text = \\ Field(text, [str]) self.voter_count", "class PollOption(TelegramStructure): text = Field() voter_count = Field() def __init__(self,", "Field() def __init__(self, text: str, voter_count: int ): self.text =", "voter_count = Field() def __init__(self, text: str, voter_count: int ):", "voter_count: int ): self.text = \\ Field(text, [str]) self.voter_count =", "TelegramStructure, Field class PollOption(TelegramStructure): text = Field() voter_count = Field()", "from .base import TelegramStructure, Field class PollOption(TelegramStructure): text = Field()", ".base import TelegramStructure, Field class PollOption(TelegramStructure): text = Field() voter_count", "Field class PollOption(TelegramStructure): text = Field() voter_count = Field() def" ]
[ "# SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class", "Default Color Maps from 'matplotlib' (Lite Version)\"\"\" homepage = \"https://github.com/sjmgarnier/viridisLite\"", "OR MIT) from spack import * class RViridislite(RPackage): \"\"\"viridisLite: Default", "Maps from 'matplotlib' (Lite Version)\"\"\" homepage = \"https://github.com/sjmgarnier/viridisLite\" url =", "Version)\"\"\" homepage = \"https://github.com/sjmgarnier/viridisLite\" url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\"", "for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack", "url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\" version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af') version('0.2.0', sha256='2d4d909f21c51e720bd685f05041ba158294e0a4064e0946d0bd916709818694')", "Spack Project Developers. See the top-level COPYRIGHT file for details.", "National Security, LLC and other # Spack Project Developers. See", "class RViridislite(RPackage): \"\"\"viridisLite: Default Color Maps from 'matplotlib' (Lite Version)\"\"\"", "LLC and other # Spack Project Developers. See the top-level", "RViridislite(RPackage): \"\"\"viridisLite: Default Color Maps from 'matplotlib' (Lite Version)\"\"\" homepage", "Security, LLC and other # Spack Project Developers. See the", "details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import", "import * class RViridislite(RPackage): \"\"\"viridisLite: Default Color Maps from 'matplotlib'", "MIT) from spack import * class RViridislite(RPackage): \"\"\"viridisLite: Default Color", "(Apache-2.0 OR MIT) from spack import * class RViridislite(RPackage): \"\"\"viridisLite:", "spack import * class RViridislite(RPackage): \"\"\"viridisLite: Default Color Maps from", "See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier:", "from spack import * class RViridislite(RPackage): \"\"\"viridisLite: Default Color Maps", "and other # Spack Project Developers. See the top-level COPYRIGHT", "# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import *", "\"https://github.com/sjmgarnier/viridisLite\" url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\" version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af') version('0.2.0',", "= \"https://github.com/sjmgarnier/viridisLite\" url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\" version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af')", "\"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\" version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af') version('0.2.0', sha256='2d4d909f21c51e720bd685f05041ba158294e0a4064e0946d0bd916709818694') depends_on('r@2.10:', type=('build',", "the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0", "(Lite Version)\"\"\" homepage = \"https://github.com/sjmgarnier/viridisLite\" url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url =", "\"\"\"viridisLite: Default Color Maps from 'matplotlib' (Lite Version)\"\"\" homepage =", "file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from", "# Spack Project Developers. See the top-level COPYRIGHT file for", "homepage = \"https://github.com/sjmgarnier/viridisLite\" url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\" version('0.3.0',", "SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RViridislite(RPackage):", "Project Developers. See the top-level COPYRIGHT file for details. #", "Lawrence Livermore National Security, LLC and other # Spack Project", "'matplotlib' (Lite Version)\"\"\" homepage = \"https://github.com/sjmgarnier/viridisLite\" url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url", "list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\" version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af') version('0.2.0', sha256='2d4d909f21c51e720bd685f05041ba158294e0a4064e0946d0bd916709818694') depends_on('r@2.10:', type=('build', 'run'))", "Developers. See the top-level COPYRIGHT file for details. 
# #", "Color Maps from 'matplotlib' (Lite Version)\"\"\" homepage = \"https://github.com/sjmgarnier/viridisLite\" url", "= \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\" list_url = \"https://cloud.r-project.org/src/contrib/Archive/viridisLite\" version('0.3.0', sha256='780ea12e7c4024d5ba9029f3a107321c74b8d6d9165262f6e64b79e00aa0c2af') version('0.2.0', sha256='2d4d909f21c51e720bd685f05041ba158294e0a4064e0946d0bd916709818694') depends_on('r@2.10:',", "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other", "* class RViridislite(RPackage): \"\"\"viridisLite: Default Color Maps from 'matplotlib' (Lite", "Livermore National Security, LLC and other # Spack Project Developers.", "other # Spack Project Developers. See the top-level COPYRIGHT file", "2013-2020 Lawrence Livermore National Security, LLC and other # Spack", "Copyright 2013-2020 Lawrence Livermore National Security, LLC and other #", "COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT)", "top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR", "from 'matplotlib' (Lite Version)\"\"\" homepage = \"https://github.com/sjmgarnier/viridisLite\" url = \"https://cloud.r-project.org/src/contrib/viridisLite_0.2.0.tar.gz\"" ]
[ "type(data_len) == type([]): data_recv_len.append( [ r, data_len[0] ] ) else:", "dsp_data = DSP( DSP_type = \"DSP_MSG\", universalAesKey = aes_key_pack[\"aes_key\"], nonce", "import time class IPNC(): def __init__(self): pass def _read_yml(self,file =", "\"utf-8\")).digest(), wait = False) if get is not None: self.__CLIENT_KEYS", "self._add_node( file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"]", "ciphertext = key.encrypt( b64_aes_key_pack, padding.OAEP( mgf = padding.MGF1(algorithm = hashes.SHA256()),", "self.__CLIENT_KEYS[target_name] key_pack = pickle.loads(base64.b64decode(key_pack)) dsp_data = DSP( DSP_type = channel_name,", "= MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER = __parent.SERVER self.CREATE_CHANNEL = __parent.CREATE_CHANNEL self.LISTEN =", "= aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX])", "the outputs for w in writable: if w not in", "= False) if get is not None: self.__CLIENT_KEYS = get", "= node[1] self._write_yml(file = file, dict_data = r_yml, mode =", "[] self.INPUTS = [] self.OUTPUTS = [] self.MESSAGE_QUEUES = {}", "def SEND(self,channel_name,target_name,data): if channel_name in self.__CUSTOM_CHANNEL: key_pack = self.__CLIENT_KEYS[target_name] key_pack", "msg_lst = list(list(zip(*message_list))) req_lst = list(list(zip(*requestList))) req_res_lst = list(list(zip(*requestResList))) vari_lst", "pass def _name_generator(self,_len_ = 16, onlyText = False): lower_case =", "pass def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True: # print(f\"Writable : {Writable}\") #", "False) if get is not None: self.__CLIENT_KEYS = get self.__VARIFIED_DEVICES.extend(list(get.keys()))", "True): \"\"\" This class allows user to create multi-client server.", "= list(list(zip(*varifierList))) send_c_msg = 
list(zip(*customChannelMessageSend)) except KeyError: pass if len(msg_lst)", "key = load_ssh_public_key( bytes( resolved_data[\"data\"], \"utf-8\" ), backend=default_backend() ) ciphertext", "r in self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r)", "= None, debug : bool = False, MTCL : bool", "aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\"))", "{\"key\" : ciphertext} dsp_data = DSP( DSP_type=\"username_secure_response\" )._messanger( MSG =", ": {channel} already exists.\") else: raise TypeError(\"When 'mutliple' is to", "str = None, port : int = None, listeners :", "s.send( dsp_data ) requestList.pop(INDEX) if len(req_res_lst) > 0: if username", "file, dict_data = r_yml, mode = \"w\") def _get_node(self,file =", "wait = True): if key == None: return self._read_yml(file) if", "p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args = list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else: raise", "writable, exceptions = select.select(self.INPUTS, self.OUTPUTS, self.INPUTS) # handling the inputs", "if r in self.WRITABLE: self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r] continue # handling", "= file self.READABLE = [] self.WRITABLE = [] self.INPUTS =", "print(f\"__varified_devices : {__varified_devices}\") if data[0] not in __varified_devices: _recv_ =", "queue import threading import sys import pickle import base64 import", "\"\"\" This class allows user to create multi-client server. 
args:", "except: pass elif _recv_.DSP_type in self.__CUSTOM_CHANNEL: try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))", "pass else: r_yml = self._read_yml(file) try: value = r_yml[key] return", "{__varified_devices}\") if data[0] not in __varified_devices: _recv_ = DSP()._convert_to_class(_data_, secure", "self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER IS ACTIVATED | LISTENING]\") self.INPUTS.append(self.sock) thread1 =", "== 0 or r not in ini[0]: try: data_len =", "True): if key == None: return self._read_yml(file) if wait: while", "__request_res_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif", "resolved_data[\"username\"] : b64_aes_key_pack } ] ) __receiving_msg.pop(INDEX) else: aes_key_pack =", "None): with open(file) as file: documents = yaml.full_load(file) return documents", "= {\"key\" : ciphertext} dsp_data = DSP( DSP_type=\"username_secure_response\" )._messanger( MSG", "IS ACTIVATED | LISTENING]\") self.INPUTS.append(self.sock) thread1 = threading.Thread( target =", "key_pack = pickle.loads(base64.b64decode(key_pack)) dsp_data = DSP( DSP_type = channel_name, universalAesKey=key_pack[\"aes_key\"],", "port self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 )", "100, \"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) message_list.pop(INDEX) except OSError:", "None ) ) ciphertext = base64.b64encode(ciphertext) prepare_data = {\"key\" :", "self.NONCE = b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad is not None: self.AAD =", "__encrypt(self,data): aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,) ct = aesgcm.encrypt( self.NONCE, data, self.AAD", ": nonce, \"aad\" : aad, } pickle_qw = pickle.dumps(qw) b64_aes_key_pack", "self.__CUSTOM_CHANNEL_MSG_REC = [] self.__CUSTOM_CHANNEL_MSG_SEND = [] self.__VARIFIER_LIST = 
[] self.__CALLBACK_LOOP", "exceptions: self.INPUTS.remove(e) if e in self.OUTPUTS: self.OUTPUTS.remove(e) e.close() del self.MESSAGE_QUEUES[e]", "= hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait = False ) if get is not", "pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except: pass except: pass def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while", "== True: if secure_dict is None: raise TypeError( \"convert_to_class() missing", "is self.sock: connection,addr = r.accept() connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] = queue.Queue()", "AESGCM.generate_key(256) nonce = os.urandom(32) aad = bytes(self._name_generator(),\"utf-8\") qw = {", "resolved_data[\"data\"], \"utf-8\" ), backend=default_backend() ) ciphertext = key.encrypt( b64_aes_key_pack, padding.OAEP(", "None, debug : bool = False, MTCL : bool =", "from re import S import select import socket import queue", "b64_aes_key_pack = base64.b64encode(pickle_qw) key = load_ssh_public_key( bytes( resolved_data[\"data\"], \"utf-8\" ),", "as file: documents = yaml.full_load(file) return documents def _write_yml(self,file =", "done the handshake\") class MAIN(IPNC): def __init__(self,secure : bool =", "vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX] ) varifierList.pop(INDEX) if len(send_c_msg) > 0:", "= aes_key_pack ) if _recv_.DSP_type == \"DSP_REQ\": try: resolved_data =", ") __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type in self.__CUSTOM_CHANNEL: try: resolved_data", "prepare_data ) dsp_data = [resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] = b64_aes_key_pack", "try: r_yml = self._read_yml(file = file) 
r_yml[node] r_yml.pop(node) self._write_yml(file =", "self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER IS", "file self.READABLE = [] self.WRITABLE = [] self.INPUTS = []", "qw = { \"aes_key\" : aes_key, \"nonce\" : nonce, \"aad\"", "thread1.start() # thread2.daemon = True thread2.start() # thread3.daemon = True", ") requestResList.pop(INDEX) if len(vari_lst) > 0: if username in vari_lst[0]:", "None, key = None, wait = True): if key ==", "= self._get_node(file = self.__file_location,key = hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait = False)", "__request_res_lst = self.REQUEST_RESPONSE_LIST # __message_lst = self.MESSAGE_LIS # __custom_c_m_r =", "for r in readable: if r is self.sock: connection,addr =", "b64_aes_key_pack self._add_node( file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get", "None: self.msg = msg else: self.msg = msg self.DSP_type =", "elif _recv_.DSP_type == \"DSP_REQ_RES\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [", "backend=default_backend() ) ciphertext = key.encrypt( b64_aes_key_pack, padding.OAEP( mgf = padding.MGF1(algorithm", "lower_case = list(\"abcdefghijklmnopqrstuvwxyz\") upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special = list(\"!@#$%&*?\") number", "# __varifier_lst = self.__VARIFIER_LIST, # __client_keys = self.__CLIENT_KEYS, # __outputs", "dict_data = r_yml, mode = \"w\") except KeyError: return False", ") dsp_data = [resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] = b64_aes_key_pack get", "data_recv_len = [] while True: readable, writable, exceptions = select.select(self.INPUTS,", "in readable: if r is self.sock: connection,addr = r.accept() connection.setblocking(0)", 
"s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) message_list.pop(INDEX) except OSError: pass if len(req_lst)", "if username in msg_lst[0]: INDEX = msg_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username]", "data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error in decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r not", "self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel : str = None,function : object", "pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [ resolved_data['target_name'], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass", "= ( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST,", "try: read = self._read_yml(file) if read != None: read[node[0]] self._change_node_value(file,node)", "is not None: self.__CLIENT_KEYS = get self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address :", "and s.fileno() == -1: Writable.remove(s) # try: try: username =", "\"w\") except KeyError: return False except: pass def _name_generator(self,_len_ =", "f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestResList.pop(INDEX)", "or r not in ini[0]: try: data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except", "DSP_type : str = None, device_id : int = None,", "req_res_lst[0]: INDEX = req_res_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack))", "self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, ) ) thread2 = threading.Thread( target =", "aes_key_pack ) if 
_recv_.DSP_type == \"DSP_REQ\": try: resolved_data = eval(_recv_.msg)", "i break if found: if args is None: p_data =", "None): try: OBJECT = base64.b64decode(OBJECT) OBJECT = pickle.loads(OBJECT) if secure", "__outputs = self.OUTPUTS, # __request_lst = self.REQUEST_LIST # __request_res_lst =", "= r_yml[key] return value except KeyError: return None except TypeError:", "= pickle.loads(base64.b64decode(_data_)) # print(f\"data[0] : {data[0]}\") # print(f\"__varified_devices : {__varified_devices}\")", "= None, nonce : bytes = None, aad : str", "__varifier_lst = self.__VARIFIER_LIST, # __client_keys = self.__CLIENT_KEYS, # __outputs =", "def _write_yml(self,file = None, dict_data = None,mode = \"a+\"): with", "self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_handshake_request_res\",", "= None -> here user need to pass a yaml", "load_ssh_public_key from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import padding from", "[] self.__VARIFIED_DEVICES = [] self.__CLIENT_KEYS = {} self.__CUSTOM_CHANNEL = []", "_read_yml(self,file = None): with open(file) as file: documents = yaml.full_load(file)", "] ) else: self._add_node( file = self.__file_location, node = [", "secure == True: if secure_dict is None: raise TypeError( \"convert_to_class()", "i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"] == channel: found = True", "base64.b64decode(OBJECT) OBJECT = pickle.loads(OBJECT) if secure == True: if secure_dict", "initializer class that will create the a asyncronouse tcp server.", "= None,mode = \"a+\"): with open(file, mode) as file: yaml.dump(dict_data,", "= self.__CLIENT_KEYS[target_name] key_pack = pickle.loads(base64.b64decode(key_pack)) dsp_data = DSP( DSP_type =", "data_recv_len.append( [ r, data_len[0] ] ) else: print(\"User Disconnected\") if", "= True thread3.start() thread = threading.Thread(target = self.__server) # thread.daemon", 
"1 required positional argument: 'secure_lst'\") else: secure_dict = pickle.loads(base64.b64decode(secure_dict)) aesgcm", "b64_aes_key_pack, padding.OAEP( mgf = padding.MGF1(algorithm = hashes.SHA256()), algorithm = hashes.SHA256(),", ") ciphertext = key.encrypt( b64_aes_key_pack, padding.OAEP( mgf = padding.MGF1(algorithm =", "base64 import os from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers.aead import", "send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True: # print(f\"Writable : {Writable}\") # time.sleep(2) for", "pass if data_len: if type(data_len) == type([]): data_recv_len.append( [ r,", "= secure self.__file_location = file self.READABLE = [] self.WRITABLE =", "= list(zip(*data_recv_len)) if len(ini) == 0 or r not in", "not specified, will raise an TypeError \"\"\" if not file:", "r_yml, mode = \"w\") except KeyError: return False except: pass", "bytes( resolved_data[\"data\"], \"utf-8\" ), backend=default_backend() ) ciphertext = key.encrypt( b64_aes_key_pack,", "file) def _add_node(self,file = None, node = None): try: read", "if args is None: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data", "DSP_type = channel_name, universalAesKey=key_pack[\"aes_key\"], nonce = key_pack[\"nonce\"], aad= key_pack[\"aad\"] )._messanger(", "receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs, __request_lst, __request_res_lst, __message_lst, __custom_c_m_r): #", "SERVER(self,address : str = None, port : int = None,", "bytes(self._name_generator(),\"utf-8\") qw = { \"aes_key\" : aes_key, \"nonce\" : nonce,", "None: get[resolved_data[\"username\"]] = b64_aes_key_pack self._add_node( file = self.__file_location, node =", "pickle.loads(base64.b64decode(secure_dict)) aesgcm = AESGCM(secure_dict[\"aes_key\"]) ct = aesgcm.decrypt( 
secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"])", "KeyError: pass except TypeError: pass else: r_yml = self._read_yml(file) try:", "self.sock: connection,addr = r.accept() connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] = queue.Queue() else:", "= [] self.__CUSTOM_CHANNEL_MSG_REC = [] self.__CUSTOM_CHANNEL_MSG_SEND = [] self.__VARIFIER_LIST =", "= self.__file_location, key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait = False ) if", "node_dict = { node[0] : node[1] } self._write_yml(file, node_dict) def", "lower_case + upper_case + special + number random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_))", "= req_lst[0].index(username) try: aes_key_pack = self.__CLIENT_KEYS[username] except KeyError: continue aes_key_pack", "for INDEX,_data_ in enumerate(__receiving_msg): data = pickle.loads(base64.b64decode(_data_)) # print(f\"data[0] :", "ciphertext} dsp_data = DSP( DSP_type=\"username_secure_response\" )._messanger( MSG = prepare_data )", "from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers.aead import AESGCM from cryptography.hazmat.primitives.serialization", "= None, multiple : bool = False): if multiple: if", "100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestResList.pop(INDEX) if len(vari_lst) >", "_recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type in self.__CUSTOM_CHANNEL:", "server(): def __init__(self, file = None, debug : bool =", "file = self.__file_location, key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait = False )", "is not None: self.AAD = aad else: self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\"", "target_name, dsp_data ] ) class server(): def __init__(self, file =", "args = list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else: raise TypeError(\"'channel' should not", 
"self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) self.sock.setblocking(0)", "p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args =", "def _convert_to_class(self,OBJECT : bytes = None,secure : bool = True,", "\"DSP_REQ_RES\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ]", "file = None, debug : bool = False, MTCL :", "channel in channel_name: if channel not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel) else:", "= True, secure_dict : list = None): try: OBJECT =", "_convert_to_class(self,OBJECT : bytes = None,secure : bool = True, secure_dict", "req_res_lst = list(list(zip(*requestResList))) vari_lst = list(list(zip(*varifierList))) send_c_msg = list(zip(*customChannelMessageSend)) except", "print(f\"data[0] : {data[0]}\") # print(f\"__varified_devices : {__varified_devices}\") if data[0] not", "== None: return self._read_yml(file) if wait: while True: r_yml =", ") ciphertext = base64.b64encode(ciphertext) prepare_data = {\"key\" : ciphertext} dsp_data", "is not None: self.msg = MSG data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data", "thread.start() def __server(self): data_recv_len = [] while True: readable, writable,", "data = pickle.loads(base64.b64decode(_data_)) # print(f\"data[0] : {data[0]}\") # print(f\"__varified_devices :", "= None): try: OBJECT = base64.b64decode(OBJECT) OBJECT = pickle.loads(OBJECT) if", "w not in self.WRITABLE: self.WRITABLE.append(w) # handling the errors for", "list(zip(*data_recv_len)) if len(ini) == 0 or r not in ini[0]:", "= list(list(zip(*requestResList))) vari_lst = list(list(zip(*varifierList))) send_c_msg = list(zip(*customChannelMessageSend)) except KeyError:", "\"aes_key\" : aes_key, \"nonce\" : nonce, \"aad\" : aad, }", "sys 
import pickle import base64 import os from cryptography.hazmat.backends import", "self.INPUTS) # handling the inputs for r in readable: if", "dsp_data ) message_list.pop(INDEX) except OSError: pass if len(req_lst) > 0:", "[] self.__CLIENT_KEYS = {} self.__CUSTOM_CHANNEL = [] self.__CUSTOM_CHANNEL_MSG_REC = []", "\"w\") def _get_node(self,file = None, key = None, wait =", "= None, aad : str = None, ): if msg", "_add_node(self,file = None, node = None): try: read = self._read_yml(file)", "s._closed == True and s.fileno() == -1: Writable.remove(s) # try:", "pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError: print(\"Client Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r)", "True: readable, writable, exceptions = select.select(self.INPUTS, self.OUTPUTS, self.INPUTS) # handling", "= vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX] ) varifierList.pop(INDEX) if len(send_c_msg) >", "sys.exit() except ValueError: print(\"sender has not done the handshake\") class", "aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) +", "read != None: read[node[0]] self._change_node_value(file,node) else: raise KeyError except KeyError:", "MSG data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data = pickle.dumps(data) pickled_data = data", "in vari_lst[0]: INDEX = vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX] ) varifierList.pop(INDEX)", "[] while True: readable, writable, exceptions = select.select(self.INPUTS, self.OUTPUTS, self.INPUTS)", "open(file) as file: documents = yaml.full_load(file) return documents def _write_yml(self,file", "writable.remove(r) self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r] continue", 
"AESGCM(self.UNIVERSAL_AES_KEY,) ct = aesgcm.encrypt( self.NONCE, data, self.AAD ) return ct", "= _data_, secure = True, secure_dict = aes_key_pack ) if", "pickle.loads(OBJECT) if secure == True: if secure_dict is None: raise", "cryptography.hazmat.backends import default_backend import hashlib import yaml import random import", "key_pack[\"nonce\"], aad= key_pack[\"aad\"] )._messanger( MSG = base64.b64encode(pickle.dumps(data)) ) self.__CUSTOM_CHANNEL_MSG_SEND.append( [", "__message_lst = self.MESSAGE_LIS # __custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC while True: try:", "except KeyError: continue aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type", "device_id if universalAesKey is not None: self.UNIVERSAL_AES_KEY = universalAesKey else:", "= self.send_func, args = ( self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST,", "None, node = None): r_yml = self._read_yml(file) r_yml[node[0]] = node[1]", "!= None: read[node[0]] self._change_node_value(file,node) else: raise KeyError except KeyError: node_dict", "secure = True, secure_dict = aes_key_pack ) if _recv_.DSP_type ==", "socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER IS ACTIVATED", "_all_ = lower_case + upper_case else: _all_ = lower_case +", "if type(data_len) == type([]): data_recv_len.append( [ r, data_len[0] ] )", "= None, DSP_type : str = None, device_id : int", "if username in vari_lst[0]: INDEX = vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX]", "None -> here user need to pass a yaml file", "import default_backend from cryptography.hazmat.primitives.ciphers.aead import AESGCM from cryptography.hazmat.primitives.serialization import load_ssh_public_key", "TypeError: sys.exit() except ValueError: print(\"sender has not done the handshake\")", 
"while True: try: for INDEX,_data_ in enumerate(__receiving_msg): data = pickle.loads(base64.b64decode(_data_))", "in req_res_lst[0]: INDEX = req_res_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack =", "= None, universalAesKey : bytes = None, nonce : bytes", "self.WRITABLE.append(w) # handling the errors for e in exceptions: self.INPUTS.remove(e)", "target = self.receive_func, args = ( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS,", "object = None,args = None): if channel is not None:", "in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel : str = None,function :", "\"\".join(random.sample(_all_,_len_)) class DSP(): def __init__( self, msg : str =", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) self.sock.setblocking(0) self.sock.bind((self.address,self.port))", "= [] self.REQUEST_RESPONSE_LIST = [] self.MESSAGE_LIST = [] self.__VARIFIED_DEVICES =", "in self.__CUSTOM_CHANNEL: try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except: pass", "= [] self.MESSAGE_QUEUES = {} self.REQUEST_LIST = [] self.REQUEST_RESPONSE_LIST =", "r in self.WRITABLE: self.WRITABLE.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue else: qwe", "aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) + 100,", "yaml.dump(dict_data, file) def _add_node(self,file = None, node = None): try:", "# __outputs = self.OUTPUTS, # __request_lst = self.REQUEST_LIST # __request_res_lst", "specified, will raise an TypeError \"\"\" if not file: raise", "self._read_yml(file) r_yml[node[0]] = node[1] self._write_yml(file = file, dict_data = r_yml,", "with open(file, mode) as file: yaml.dump(dict_data, file) def _add_node(self,file =", "secure_dict : list = None): try: 
OBJECT = base64.b64decode(OBJECT) OBJECT", "pass elif _recv_.DSP_type in self.__CUSTOM_CHANNEL: try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data)", "if not file: raise TypeError(\"asyncServer() missing 1 required positional argument:", "list = None): try: OBJECT = base64.b64decode(OBJECT) OBJECT = pickle.loads(OBJECT)", "if username in req_res_lst[0]: INDEX = req_res_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username]", "yaml import random import time class IPNC(): def __init__(self): pass", "None: raise TypeError( \"convert_to_class() missing 1 required positional argument: 'secure_lst'\")", "None,function : object = None,args = None): if channel is", "resolved_data = eval(_recv_.msg) resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [ resolved_data[\"target_name\"], _recv_.msg", "found = False index = None if channel in self.__CUSTOM_CHANNEL:", "channel_name: if channel not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel :", "LISTENING]\") self.INPUTS.append(self.sock) thread1 = threading.Thread( target = self.receive_func, args =", "DSP( DSP_type=\"username_secure_response\" )._messanger( MSG = prepare_data ) dsp_data = [resolved_data[\"username\"],dsp_data]", "padding.OAEP( mgf = padding.MGF1(algorithm = hashes.SHA256()), algorithm = hashes.SHA256(), label", "= pickle.dumps(encrypted_data) ret = base64.b64encode(p_e_d) return ret def __repr__(self): return", "= get self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address : str = None, port", "None if channel in self.__CUSTOM_CHANNEL: for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if", "documents def _write_yml(self,file = None, dict_data = None,mode = \"a+\"):", "# __client_keys = self.__CLIENT_KEYS, # __outputs = self.OUTPUTS, # __request_lst", "print(\"User Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r) if 
r", "base64.b64encode(pickle_qw) key = load_ssh_public_key( bytes( resolved_data[\"data\"], \"utf-8\" ), backend=default_backend() )", "try: value = r_yml[key] return value except KeyError: pass except", "for s in Writable: if s._closed == True and s.fileno()", "DSP_type = \"DSP_MSG\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad", "= threading.Thread(target = self.__server) # thread.daemon = True thread.start() def", "dsp_data ] ) class server(): def __init__(self, file = None,", "= queue.Queue() else: ini = list(zip(*data_recv_len)) if len(ini) == 0", "ini[0]: try: data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError: print(\"Client Disconnected\") if", "aes_key_pack = self.__CLIENT_KEYS[username] except KeyError: continue aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data", "dsp_data = DSP( DSP_type = \"DSP_handshake_request_res\", universalAesKey = aes_key_pack[\"aes_key\"], nonce", "= threading.Thread( target = self.send_func, args = ( self.WRITABLE, self.MESSAGE_QUEUES,", "== \"DSP_REQ_RES\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [ resolved_data[\"target_name\"], _recv_.msg", "0 or r not in ini[0]: try: data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\")))", "self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, )", "s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX] ) varifierList.pop(INDEX) if len(send_c_msg) > 0: if", "from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.backends", "== \"DSP_MSG\": try: resolved_data = 
pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [ resolved_data['target_name'], _recv_.msg", "KeyError: continue aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type =", "import hashlib import yaml import random import time class IPNC():", "padding.MGF1(algorithm = hashes.SHA256()), algorithm = hashes.SHA256(), label = None )", "elif _recv_.DSP_type == \"DSP_MSG\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [", "+ upper_case + special + number random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_)) class", "s.send( dsp_data ) requestResList.pop(INDEX) if len(vari_lst) > 0: if username", "yaml file which saves all the keys and configurations. if", "\"DSP_MSG\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [ resolved_data['target_name'], _recv_.msg ]", "True thread3.start() thread = threading.Thread(target = self.__server) # thread.daemon =", "channel_name, universalAesKey=key_pack[\"aes_key\"], nonce = key_pack[\"nonce\"], aad= key_pack[\"aad\"] )._messanger( MSG =", "= hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait = False) if get is not", "self.__CLIENT_KEYS = get self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address : str = None,", "aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_handshake_request_res\", universalAesKey", "self.__file_location = file self.READABLE = [] self.WRITABLE = [] self.INPUTS", "class IPNC(): def __init__(self): pass def _read_yml(self,file = None): with", "= pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except: pass except: pass def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend):", "msg : str = None, DSP_type : str = None,", "True, MPCL : bool = False, safeMode : bool =", 
"aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\"))", "value = r_yml[key] return value except KeyError: pass except TypeError:", "None, nonce : bytes = None, aad : str =", "except ValueError: print(\"sender has not done the handshake\") class MAIN(IPNC):", "self.OUTPUTS, self.INPUTS) # handling the inputs for r in readable:", "r_yml[key] return value except KeyError: pass except TypeError: pass else:", "return \"_main.DSP._\" def __encrypt(self,data): aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,) ct = aesgcm.encrypt(", "self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND ) ) thread3 =", "DSP( DSP_type = \"DSP_handshake_request\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"],", "positional argument: 'secure_lst'\") else: secure_dict = pickle.loads(base64.b64decode(secure_dict)) aesgcm = AESGCM(secure_dict[\"aes_key\"])", "def _change_node_value(self,file = None, node = None): r_yml = self._read_yml(file)", "required positional argument: 'secure_lst'\") else: secure_dict = pickle.loads(base64.b64decode(secure_dict)) aesgcm =", "self.MESSAGE_QUEUES[r] continue # handling the outputs for w in writable:", "w in writable: if w not in self.WRITABLE: self.WRITABLE.append(w) #", "__parent.SERVER self.CREATE_CHANNEL = __parent.CREATE_CHANNEL self.LISTEN = __parent.LISTEN self.SEND = __parent.SEND", "= req_res_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data =", "# __message_lst = self.MESSAGE_LIS # __custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC while True:", "raise an TypeError \"\"\" if not file: raise TypeError(\"asyncServer() missing", "OBJECT[-1], self.AAD) ct = pickle.loads(ct) return eval(ct) except TypeError: 
sys.exit()", "writable: if w not in self.WRITABLE: self.WRITABLE.append(w) # handling the", "= pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_MSG\", universalAesKey =", "channel_name not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel : str =", "universalAesKey : bytes = None, nonce : bytes = None,", "resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except: pass except: pass def", ") thread2 = threading.Thread( target = self.send_func, args = (", "= False index = None if channel in self.__CUSTOM_CHANNEL: for", "MTCL : bool = True, MPCL : bool = False,", "wait = False ) if get is not None: get[resolved_data[\"username\"]]", "import default_backend import hashlib import yaml import random import time", "pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass", "{channel} already exists.\") else: raise TypeError(\"When 'mutliple' is to True", "self.REQUEST_LIST # __request_res_lst = self.REQUEST_RESPONSE_LIST # __message_lst = self.MESSAGE_LIS #", "= address self.port = port self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt(", "True, secure_dict : list = None): try: OBJECT = base64.b64decode(OBJECT)", "key = None, wait = True): if key == None:", "= None): self.address = address self.port = port self.sock =", "__init__( self, msg : str = None, DSP_type : str", "ct = pickle.loads(ct) return eval(ct) else: aesgcm = AESGCM(self.UNIVERSAL_AES_KEY) ct", ") requestList.pop(INDEX) if len(req_res_lst) > 0: if username in req_res_lst[0]:", "ct = pickle.loads(ct) return eval(ct) except TypeError: sys.exit() except ValueError:", "load_ssh_public_key( bytes( resolved_data[\"data\"], \"utf-8\" ), backend=default_backend() ) ciphertext = key.encrypt(", "DSP()._convert_to_class(_data_, 
secure = False) if _recv_.DSP_type == \"username_secure\": resolved_data =", "self.UNIVERSAL_AES_KEY = universalAesKey else: self.UNIVERSAL_AES_KEY = b'<KEY>' if nonce is", "default_backend from cryptography.hazmat.primitives.ciphers.aead import AESGCM from cryptography.hazmat.primitives.serialization import load_ssh_public_key from", "__custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC while True: try: for INDEX,_data_ in enumerate(__receiving_msg):", "r, data_len[0] ] ) else: print(\"User Disconnected\") if r in", "= channel_name, universalAesKey=key_pack[\"aes_key\"], nonce = key_pack[\"nonce\"], aad= key_pack[\"aad\"] )._messanger( MSG", "IPNC.__init__(self) self.__secure = secure self.__file_location = file self.READABLE = []", "missing 1 required positional argument: 'secure_lst'\") else: secure_dict = pickle.loads(base64.b64decode(secure_dict))", "self._read_yml(file) if wait: while True: r_yml = self._read_yml(file) try: value", "try: username = message_q[s].get_nowait() message_q[s].put(username) msg_lst = list(list(zip(*message_list))) req_lst =", "del self.MESSAGE_QUEUES[r] continue # handling the outputs for w in", "True then channel_name should be a list of multiple channel", "node = None): r_yml = self._read_yml(file) r_yml[node[0]] = node[1] self._write_yml(file", "None): \"\"\"async_server initializer class that will create the a asyncronouse", "INDEX = req_res_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data", "is None: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index)", "def CREATE_CHANNEL(self,channel_name = None, multiple : bool = False): if", "DSP_type self.device_id = device_id if universalAesKey is not None: self.UNIVERSAL_AES_KEY", "True,file = None): \"\"\"async_server initializer class that will create the", "wait = False) if get is not None: 
self.__CLIENT_KEYS =", "bool = True,file = None): \"\"\"async_server initializer class that will", "s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestList.pop(INDEX) if len(req_res_lst) > 0: if", "should set to the default value True, file : str", ") message_list.pop(INDEX) except OSError: pass if len(req_lst) > 0: if", "self._get_node( file = self.__file_location, key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait = False", "def __init__(self, file = None, debug : bool = False,", "channel is not None: found = False index = None", "= aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\")", "= self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type =", "except Exception as e: pass if data_len: if type(data_len) ==", "try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] )", "= None): r_yml = self._read_yml(file) r_yml[node[0]] = node[1] self._write_yml(file =", "self.msg = msg else: self.msg = msg self.DSP_type = DSP_type", "__client_keys[data[0]] _recv_ = DSP()._convert_to_class( OBJECT = _data_, secure = True,", "not done the handshake\") class MAIN(IPNC): def __init__(self,secure : bool", "= None, dict_data = None,mode = \"a+\"): with open(file, mode)", "else: raise KeyError except KeyError: node_dict = { node[0] :", "= None, node = None): try: read = self._read_yml(file) if", "aesgcm = AESGCM(self.UNIVERSAL_AES_KEY) ct = aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD) ct =", "] ) else: print(\"User Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r)", "return self._read_yml(file) if wait: while True: r_yml = self._read_yml(file) try:", "self._change_node_value(file,node) else: raise KeyError except KeyError: node_dict = { node[0]", "try: 
s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) message_list.pop(INDEX) except OSError: pass if", "aad else: self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG = None): if", "else: print(f\"Channel : {channel} already exists.\") else: raise TypeError(\"When 'mutliple'", ": list = None): try: OBJECT = base64.b64decode(OBJECT) OBJECT =", "the keys and configurations. if not specified, will raise an", "str = None -> here user need to pass a", "def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs, __request_lst, __request_res_lst, __message_lst, __custom_c_m_r):", "bool = False, safeMode : bool = True): \"\"\" This", "pickle_qw = pickle.dumps(qw) b64_aes_key_pack = base64.b64encode(pickle_qw) key = load_ssh_public_key( bytes(", "r_yml, mode = \"w\") def _get_node(self,file = None, key =", "send_c_msg[0]: INDEX = send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) # except: #", "eval(ct) except TypeError: sys.exit() except ValueError: print(\"sender has not done", "self.__VARIFIER_LIST, # __client_keys = self.__CLIENT_KEYS, # __outputs = self.OUTPUTS, #", "for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"] == channel: found =", "random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_)) class DSP(): def __init__( self, msg :", "= self.MESSAGE_LIS # __custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC while True: try: for", "MSG is not None: self.msg = MSG data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")'", "= None): try: read = self._read_yml(file) if read != None:", "= None,secure : bool = True, secure_dict : list =", "yaml.full_load(file) return documents def _write_yml(self,file = None, dict_data = None,mode", "# except: # pass def CREATE_CHANNEL(self,channel_name = None, multiple :", "= False) if _recv_.DSP_type == 
\"username_secure\": resolved_data = eval(_recv_.msg) aes_key", "} ] ) __receiving_msg.pop(INDEX) else: aes_key_pack = __client_keys[data[0]] _recv_ =", "INDEX = vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX] ) varifierList.pop(INDEX) if len(send_c_msg)", "s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) # except: # pass def CREATE_CHANNEL(self,channel_name = None,", ": str = None, ): if msg is not None:", "True thread1.start() # thread2.daemon = True thread2.start() # thread3.daemon =", "try: data = data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error in decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0])", "+ upper_case else: _all_ = lower_case + upper_case + special", "False except: pass def _name_generator(self,_len_ = 16, onlyText = False):", "except: pass except: pass def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True: # print(f\"Writable", "def __init__( self, msg : str = None, DSP_type :", "self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, ) ) thread2 = threading.Thread( target", "import S import select import socket import queue import threading", "self.SERVER = __parent.SERVER self.CREATE_CHANNEL = __parent.CREATE_CHANNEL self.LISTEN = __parent.LISTEN self.SEND", "self.NONCE = nonce else: self.NONCE = b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad is", "self.MESSAGE_LIS # __custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC while True: try: for INDEX,_data_", "__receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_MSG\": try: resolved_data =", "self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name, dsp_data ] ) class server(): def __init__(self,", "self.__CUSTOM_CHANNEL = [] self.__CUSTOM_CHANNEL_MSG_REC = [] self.__CUSTOM_CHANNEL_MSG_SEND = [] 
self.__VARIFIER_LIST", "# pass def CREATE_CHANNEL(self,channel_name = None, multiple : bool =", "thread2.daemon = True thread2.start() # thread3.daemon = True thread3.start() thread", "= aesgcm.encrypt( self.NONCE, data, self.AAD ) return ct def _convert_to_class(self,OBJECT", "== type([]): data_recv_len.append( [ r, data_len[0] ] ) else: print(\"User", "self.WRITABLE.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue else: qwe = list(zip(*data_recv_len)) INDEX", "file: raise TypeError(\"asyncServer() missing 1 required positional argument: 'file'\") __parent", "= None ) ) ciphertext = base64.b64encode(ciphertext) prepare_data = {\"key\"", "_data_, secure = True, secure_dict = aes_key_pack ) if _recv_.DSP_type", "r_yml[key] return value except KeyError: return None except TypeError: pass", "a list of multiple channel names\") else: if channel_name not", "# thread2.daemon = True thread2.start() # thread3.daemon = True thread3.start()", "True index = i break if found: if args is", "def _add_node(self,file = None, node = None): try: read =", "TypeError( \"convert_to_class() missing 1 required positional argument: 'secure_lst'\") else: secure_dict", ") __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_REQ_RES\": try: resolved_data", "self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r not in self.OUTPUTS: self.OUTPUTS.append(r) except Exception", "f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data = pickle.dumps(data) pickled_data = data encrypted_data = [self.device_id,", "= base64.b64encode(pickle.dumps(data)) ) self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name, dsp_data ] ) class", "KeyError: pass if len(msg_lst) > 0: if username in msg_lst[0]:", "self.OUTPUTS.append(r) except Exception as e: print(\"User Disconnected\") readable.remove(r) self.INPUTS.remove(r) writable.remove(r)", "DSP_type=\"username_secure_response\" )._messanger( MSG = prepare_data ) dsp_data = 
[resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data)", "aes_key_pack = __client_keys[data[0]] _recv_ = DSP()._convert_to_class( OBJECT = _data_, secure", "except KeyError: return False except: pass def _name_generator(self,_len_ = 16,", "cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers.aead import AESGCM from cryptography.hazmat.primitives.serialization import", "list(list(zip(*requestList))) req_res_lst = list(list(zip(*requestResList))) vari_lst = list(list(zip(*varifierList))) send_c_msg = list(zip(*customChannelMessageSend))", "thread1 = threading.Thread( target = self.receive_func, args = ( self.__RECEIVING_MSG,", "__varified_devices: _recv_ = DSP()._convert_to_class(_data_, secure = False) if _recv_.DSP_type ==", "self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r]", "mode) as file: yaml.dump(dict_data, file) def _add_node(self,file = None, node", "if r in self.WRITABLE: self.WRITABLE.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue else:", ": object = None,args = None): if channel is not", "len(ini) == 0 or r not in ini[0]: try: data_len", "except ConnectionResetError: print(\"Client Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) if", "not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel : str = None,function", "] ) __receiving_msg.pop(INDEX) else: aes_key_pack = __client_keys[data[0]] _recv_ = DSP()._convert_to_class(", "= [] self.__VARIFIER_LIST = [] self.__CALLBACK_LOOP = [] self.__RECEIVING_MSG =", "bool = True): \"\"\" This class allows user to create", "in self.__CUSTOM_CHANNEL: key_pack = self.__CLIENT_KEYS[target_name] key_pack = pickle.loads(base64.b64decode(key_pack)) dsp_data =", "str = None, ): if msg is not None: self.msg", "= key_pack[\"nonce\"], aad= key_pack[\"aad\"] )._messanger( MSG = base64.b64encode(pickle.dumps(data)) ) 
self.__CUSTOM_CHANNEL_MSG_SEND.append(", "= prepare_data ) dsp_data = [resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] =", "Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) if r in self.WRITABLE:", "[] get = self._get_node(file = self.__file_location,key = hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners)", "= aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\")", "key_pack = self.__CLIENT_KEYS[target_name] key_pack = pickle.loads(base64.b64decode(key_pack)) dsp_data = DSP( DSP_type", "args = ( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST,", "data_len[0] ] ) else: print(\"User Disconnected\") if r in self.OUTPUTS:", "self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r] continue # handling the outputs for w", "INDEX,_data_ in enumerate(__receiving_msg): data = pickle.loads(base64.b64decode(_data_)) # print(f\"data[0] : {data[0]}\")", "if len(req_res_lst) > 0: if username in req_res_lst[0]: INDEX =", "list of multiple channel names\") else: if channel_name not in", "def __server(self): data_recv_len = [] while True: readable, writable, exceptions", "data = pickle.dumps(data) pickled_data = data encrypted_data = [self.device_id, self.__encrypt(pickled_data)]", "= yaml.full_load(file) return documents def _write_yml(self,file = None, dict_data =", "= pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except:", "= self.__file_location,key = 
hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait = False) if get", "= AESGCM(self.UNIVERSAL_AES_KEY,) ct = aesgcm.encrypt( self.NONCE, data, self.AAD ) return", "] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_MSG\": try:", "create multi-client server. args: secure : bool = True ->", "\"aad\" : aad, } pickle_qw = pickle.dumps(qw) b64_aes_key_pack = base64.b64encode(pickle_qw)", "__varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] = b64_aes_key_pack get = self._get_node( file =", "= f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data )", "if len(ini) == 0 or r not in ini[0]: try:", "self.OUTPUTS: self.OUTPUTS.remove(e) e.close() del self.MESSAGE_QUEUES[e] def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys,", "need to pass a yaml file which saves all the", "raise TypeError(\"asyncServer() missing 1 required positional argument: 'file'\") __parent =", "except: pass elif _recv_.DSP_type == \"DSP_MSG\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))", "time.sleep(2) for s in Writable: if s._closed == True and", "= nonce else: self.NONCE = b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad is not", ") __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_MSG\": try: resolved_data", "read[node[0]] self._change_node_value(file,node) else: raise KeyError except KeyError: node_dict = {", "re import S import select import socket import queue import", "debug : bool = False, MTCL : bool = True,", "the inputs for r in readable: if r is self.sock:", "Writable.remove(s) # try: try: username = message_q[s].get_nowait() message_q[s].put(username) msg_lst =", "configurations. 
if not specified, will raise an TypeError \"\"\" if", "__repr__(self): return \"_main.DSP._\" def __encrypt(self,data): aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,) ct =", "( self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND ) )", "username in msg_lst[0]: INDEX = msg_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack", "type([]): data_recv_len.append( [ r, data_len[0] ] ) else: print(\"User Disconnected\")", "= \"DSP_handshake_request\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad =", "self.__VARIFIER_LIST = [] self.__CALLBACK_LOOP = [] self.__RECEIVING_MSG = [] get", "aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) + 100,", "secure_dict[\"aad\"]) ct = pickle.loads(ct) return eval(ct) else: aesgcm = AESGCM(self.UNIVERSAL_AES_KEY)", "if r in self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue", "} pickle_qw = pickle.dumps(qw) b64_aes_key_pack = base64.b64encode(pickle_qw) key = load_ssh_public_key(", "class allows user to create multi-client server. 
args: secure :", "enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"] == channel: found = True index =", "= self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args = list(args)", "= b64_aes_key_pack self._add_node( file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(),", "= True,file = None): \"\"\"async_server initializer class that will create", "if universalAesKey is not None: self.UNIVERSAL_AES_KEY = universalAesKey else: self.UNIVERSAL_AES_KEY", "pickled_data = data encrypted_data = [self.device_id, self.__encrypt(pickled_data)] p_e_d = pickle.dumps(encrypted_data)", "= None if channel in self.__CUSTOM_CHANNEL: for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC):", "self.DSP_type = DSP_type self.device_id = device_id if universalAesKey is not", "onlyText = False): lower_case = list(\"abcdefghijklmnopqrstuvwxyz\") upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special", "eval(_recv_.msg) resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] )", "self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER IS ACTIVATED | LISTENING]\") self.INPUTS.append(self.sock) thread1", "keys and configurations. 
if not specified, will raise an TypeError", "self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"] : b64_aes_key_pack }", "self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, ) ) thread2 = threading.Thread( target = self.send_func,", "= os.urandom(32) aad = bytes(self._name_generator(),\"utf-8\") qw = { \"aes_key\" :", "key.encrypt( b64_aes_key_pack, padding.OAEP( mgf = padding.MGF1(algorithm = hashes.SHA256()), algorithm =", "if r is self.sock: connection,addr = r.accept() connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection]", "msg_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP(", "r_yml[node] r_yml.pop(node) self._write_yml(file = file, dict_data = r_yml, mode =", "if d[\"channel\"] == channel: found = True index = i", "self.msg = msg self.DSP_type = DSP_type self.device_id = device_id if", "req_lst = list(list(zip(*requestList))) req_res_lst = list(list(zip(*requestResList))) vari_lst = list(list(zip(*varifierList))) send_c_msg", "list(\"!@#$%&*?\") number = list(\"0123456789\") if onlyText: _all_ = lower_case +", "self.__VARIFIED_DEVICES, # __varifier_lst = self.__VARIFIER_LIST, # __client_keys = self.__CLIENT_KEYS, #", "= aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX])", "False, safeMode : bool = True): \"\"\" This class allows", "data encrypted_data = [self.device_id, self.__encrypt(pickled_data)] p_e_d = pickle.dumps(encrypted_data) ret =", "from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.backends import default_backend import hashlib", ": str = None, port : int = None, listeners", "requestList.pop(INDEX) if len(req_res_lst) > 0: if username in req_res_lst[0]: INDEX", "pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = 
\"DSP_handshake_request_res\", universalAesKey = aes_key_pack[\"aes_key\"],", "None,mode = \"a+\"): with open(file, mode) as file: yaml.dump(dict_data, file)", "ACTIVATED | LISTENING]\") self.INPUTS.append(self.sock) thread1 = threading.Thread( target = self.receive_func,", "__message_lst, __custom_c_m_r): # __receiving_msg = self.__RECEIVING_MSG, # __varified_devices = self.__VARIFIED_DEVICES,", "create the a asyncronouse tcp server. \"\"\" IPNC.__init__(self) self.__secure =", "not in __varified_devices: _recv_ = DSP()._convert_to_class(_data_, secure = False) if", "SEND(self,channel_name,target_name,data): if channel_name in self.__CUSTOM_CHANNEL: key_pack = self.__CLIENT_KEYS[target_name] key_pack =", "= DSP( DSP_type = channel_name, universalAesKey=key_pack[\"aes_key\"], nonce = key_pack[\"nonce\"], aad=", "# print(f\"data[0] : {data[0]}\") # print(f\"__varified_devices : {__varified_devices}\") if data[0]", "aes_key, \"nonce\" : nonce, \"aad\" : aad, } pickle_qw =", "dsp_data = DSP( DSP_type = \"DSP_handshake_request\", universalAesKey = aes_key_pack[\"aes_key\"], nonce", ": bool = True -> this should set to the", "import load_ssh_public_key from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import padding", "= b64_aes_key_pack get = self._get_node( file = self.__file_location, key =", "__request_res_lst, __message_lst, __custom_c_m_r): # __receiving_msg = self.__RECEIVING_MSG, # __varified_devices =", ").decode().center(len(req_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestList.pop(INDEX) if", "b64_aes_key_pack get = self._get_node( file = self.__file_location, key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(),", "args is None: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data =", "# try: try: username = message_q[s].get_nowait() 
message_q[s].put(username) msg_lst = list(list(zip(*message_list)))", "del self.MESSAGE_QUEUES[e] def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs, __request_lst, __request_res_lst,", "__init__(self,secure : bool = True,file = None): \"\"\"async_server initializer class", "= aesgcm.decrypt( secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"]) ct = pickle.loads(ct) return eval(ct)", ": {Writable}\") # time.sleep(2) for s in Writable: if s._closed", "[ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"] : b64_aes_key_pack } ] ) __receiving_msg.pop(INDEX)", "r not in self.OUTPUTS: self.OUTPUTS.append(r) except Exception as e: print(\"User", "= self.__CLIENT_KEYS, # __outputs = self.OUTPUTS, # __request_lst = self.REQUEST_LIST", "[ r, data_len[0] ] ) else: print(\"User Disconnected\") if r", "if data[0] not in __varified_devices: _recv_ = DSP()._convert_to_class(_data_, secure =", "self.__server) # thread.daemon = True thread.start() def __server(self): data_recv_len =", "else: ini = list(zip(*data_recv_len)) if len(ini) == 0 or r", "positional argument: 'file'\") __parent = MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER = __parent.SERVER self.CREATE_CHANNEL", "file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ] )", ": b64_aes_key_pack } ] ) __receiving_msg.pop(INDEX) else: aes_key_pack = __client_keys[data[0]]", "None: self.AAD = aad else: self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG", "__receiving_msg.remove(_data_) except: pass except: pass def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True: #", "r_yml = self._read_yml(file = file) r_yml[node] r_yml.pop(node) self._write_yml(file = file,", "self.__file_location,key = hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait = False) if get is", "[ resolved_data['target_name'], 
_recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type", "r_yml[node[0]] = node[1] self._write_yml(file = file, dict_data = r_yml, mode", "in self.OUTPUTS: self.OUTPUTS.remove(e) e.close() del self.MESSAGE_QUEUES[e] def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst,", "in enumerate(__receiving_msg): data = pickle.loads(base64.b64decode(_data_)) # print(f\"data[0] : {data[0]}\") #", "type([]): for channel in channel_name: if channel not in self.__CUSTOM_CHANNEL:", "send_c_msg = list(zip(*customChannelMessageSend)) except KeyError: pass if len(msg_lst) > 0:", "\"_main.DSP._\" def __encrypt(self,data): aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,) ct = aesgcm.encrypt( self.NONCE,", "= hashes.SHA256()), algorithm = hashes.SHA256(), label = None ) )", "== type([]): for channel in channel_name: if channel not in", "thread.daemon = True thread.start() def __server(self): data_recv_len = [] while", "time class IPNC(): def __init__(self): pass def _read_yml(self,file = None):", "multi-client server. 
args: secure : bool = True -> this", "aesgcm.encrypt( self.NONCE, data, self.AAD ) return ct def _convert_to_class(self,OBJECT :", "= load_ssh_public_key( bytes( resolved_data[\"data\"], \"utf-8\" ), backend=default_backend() ) ciphertext =", "data_recv_len.pop(INDEX)[1] data = r.recv(recv_len) try: data = data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error", "base64.b64encode(ciphertext) prepare_data = {\"key\" : ciphertext} dsp_data = DSP( DSP_type=\"username_secure_response\"", "> 0: if username in send_c_msg[0]: INDEX = send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\"))", "dsp_data = DSP( DSP_type = channel_name, universalAesKey=key_pack[\"aes_key\"], nonce = key_pack[\"nonce\"],", "None, ): if msg is not None: self.msg = msg", "user need to pass a yaml file which saves all", "= [] self.__VARIFIED_DEVICES = [] self.__CLIENT_KEYS = {} self.__CUSTOM_CHANNEL =", "None: return self._read_yml(file) if wait: while True: r_yml = self._read_yml(file)", "while True: r_yml = self._read_yml(file) try: value = r_yml[key] return", "req_lst[0].index(username) try: aes_key_pack = self.__CLIENT_KEYS[username] except KeyError: continue aes_key_pack =", "else: aesgcm = AESGCM(self.UNIVERSAL_AES_KEY) ct = aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD) ct", "cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.backends import default_backend import hashlib import", "except TypeError: sys.exit() except ValueError: print(\"sender has not done the", "None: found = False index = None if channel in", "= self.__CUSTOM_CHANNEL_MSG_REC while True: try: for INDEX,_data_ in enumerate(__receiving_msg): data", "r in self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue except", "= file, dict_data = r_yml, mode = \"w\") def _get_node(self,file", "self.__CUSTOM_CHANNEL: try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) 
__custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except: pass except:", "args = ( self.__CALLBACK_LOOP, ) ) # thread1.daemon = True", "except: # pass def CREATE_CHANNEL(self,channel_name = None, multiple : bool", "os.urandom(32) aad = bytes(self._name_generator(),\"utf-8\") qw = { \"aes_key\" : aes_key,", "self.NONCE, data, self.AAD ) return ct def _convert_to_class(self,OBJECT : bytes", "_recv_.DSP_type in self.__CUSTOM_CHANNEL: try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except:", "import hashes from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.backends import default_backend", "not file: raise TypeError(\"asyncServer() missing 1 required positional argument: 'file'\")", "bool = True, secure_dict : list = None): try: OBJECT", "an TypeError \"\"\" if not file: raise TypeError(\"asyncServer() missing 1", "self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) r.close() del", "= { node[0] : node[1] } self._write_yml(file, node_dict) def _change_node_value(self,file", ": str = None,function : object = None,args = None):", "self.port = port self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR,", "TypeError: pass else: r_yml = self._read_yml(file) try: value = r_yml[key]", "None, universalAesKey : bytes = None, nonce : bytes =", "{Writable}\") # time.sleep(2) for s in Writable: if s._closed ==", ": int = None, universalAesKey : bytes = None, nonce", "__callback_loop(self,__callback_loop): while True: for index,func in enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1]) def", "KeyError: return False except: pass def _name_generator(self,_len_ = 16, onlyText", "= msg self.DSP_type = DSP_type self.device_id = device_id if universalAesKey", "__callback_loop.pop(index) func[0](*func[1]) 
def SEND(self,channel_name,target_name,data): if channel_name in self.__CUSTOM_CHANNEL: key_pack =", "def _remove_node(self,file,node): try: r_yml = self._read_yml(file = file) r_yml[node] r_yml.pop(node)", "vari_lst = list(list(zip(*varifierList))) send_c_msg = list(zip(*customChannelMessageSend)) except KeyError: pass if", "__request_lst, __request_res_lst, __message_lst, __custom_c_m_r): # __receiving_msg = self.__RECEIVING_MSG, # __varified_devices", "qwe = list(zip(*data_recv_len)) INDEX = qwe[0].index(r) try: recv_len = data_recv_len.pop(INDEX)[1]", ")._messanger( MSG = f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\"))", "continue else: qwe = list(zip(*data_recv_len)) INDEX = qwe[0].index(r) try: recv_len", "== True and s.fileno() == -1: Writable.remove(s) # try: try:", "_write_yml(self,file = None, dict_data = None,mode = \"a+\"): with open(file,", "= False, safeMode : bool = True): \"\"\" This class", "self.UNIVERSAL_AES_KEY = b'<KEY>' if nonce is not None: self.NONCE =", "import queue import threading import sys import pickle import base64", "True thread2.start() # thread3.daemon = True thread3.start() thread = threading.Thread(target", "MPCL : bool = False, safeMode : bool = True):", "self.OUTPUTS, # __request_lst = self.REQUEST_LIST # __request_res_lst = self.REQUEST_RESPONSE_LIST #", "self._read_yml(file) try: value = r_yml[key] return value except KeyError: return", "None: self.__CLIENT_KEYS = get self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address : str =", "= r.recv(recv_len) try: data = data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error in decoding\")", "s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) # except: # pass def CREATE_CHANNEL(self,channel_name =", "TypeError(\"When 'mutliple' is to True then 
channel_name should be a", "ct def _convert_to_class(self,OBJECT : bytes = None,secure : bool =", "_change_node_value(self,file = None, node = None): r_yml = self._read_yml(file) r_yml[node[0]]", "False) if _recv_.DSP_type == \"username_secure\": resolved_data = eval(_recv_.msg) aes_key =", "r_yml = self._read_yml(file) try: value = r_yml[key] return value except", "None): r_yml = self._read_yml(file) r_yml[node[0]] = node[1] self._write_yml(file = file,", "_remove_node(self,file,node): try: r_yml = self._read_yml(file = file) r_yml[node] r_yml.pop(node) self._write_yml(file", "True: # print(f\"Writable : {Writable}\") # time.sleep(2) for s in", "data = data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error in decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if", "= b'<KEY>' if nonce is not None: self.NONCE = nonce", "r.accept() connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] = queue.Queue() else: ini = list(zip(*data_recv_len))", "del self.MESSAGE_QUEUES[r] continue else: qwe = list(zip(*data_recv_len)) INDEX = qwe[0].index(r)", "= True): \"\"\" This class allows user to create multi-client", "= \"DSP_handshake_request_res\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad =", "in exceptions: self.INPUTS.remove(e) if e in self.OUTPUTS: self.OUTPUTS.remove(e) e.close() del", "node[0] : node[1] } self._write_yml(file, node_dict) def _change_node_value(self,file = None,", "[ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type", "index = None if channel in self.__CUSTOM_CHANNEL: for i,d in", "[resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] = b64_aes_key_pack get = self._get_node( file", "None: self.NONCE = nonce else: self.NONCE = 
b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad", "pass a yaml file which saves all the keys and", "if get is not None: get[resolved_data[\"username\"]] = b64_aes_key_pack self._add_node( file", "= 16, onlyText = False): lower_case = list(\"abcdefghijklmnopqrstuvwxyz\") upper_case =", "channel names\") else: if channel_name not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def", "listeners : int = None): self.address = address self.port =", "= [] while True: readable, writable, exceptions = select.select(self.INPUTS, self.OUTPUTS,", "node_dict) def _change_node_value(self,file = None, node = None): r_yml =", "{} self.REQUEST_LIST = [] self.REQUEST_RESPONSE_LIST = [] self.MESSAGE_LIST = []", ") ) thread3 = threading.Thread( target = self.__callback_loop, args =", "in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"] == channel: found = True index", "CREATE_CHANNEL(self,channel_name = None, multiple : bool = False): if multiple:", "= True thread2.start() # thread3.daemon = True thread3.start() thread =", "= universalAesKey else: self.UNIVERSAL_AES_KEY = b'<KEY>' if nonce is not", "raise TypeError(\"'channel' should not be None\") def __callback_loop(self,__callback_loop): while True:", "= f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data )", "saves all the keys and configurations. 
if not specified, will", "if w not in self.WRITABLE: self.WRITABLE.append(w) # handling the errors", "import pickle import base64 import os from cryptography.hazmat.backends import default_backend", ": bool = True): \"\"\" This class allows user to", "= padding.MGF1(algorithm = hashes.SHA256()), algorithm = hashes.SHA256(), label = None", "= eval(_recv_.msg) aes_key = AESGCM.generate_key(256) nonce = os.urandom(32) aad =", "if wait: while True: r_yml = self._read_yml(file) try: value =", ").decode().center(len(msg_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) message_list.pop(INDEX)", "mode = \"w\") def _get_node(self,file = None, key = None,", ": bool = False, safeMode : bool = True): \"\"\"", "readable.remove(r) self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) del", "= True -> this should set to the default value", "file: documents = yaml.full_load(file) return documents def _write_yml(self,file = None,", "self.OUTPUTS: self.OUTPUTS.append(r) except Exception as e: print(\"User Disconnected\") readable.remove(r) self.INPUTS.remove(r)", "= None): with open(file) as file: documents = yaml.full_load(file) return", "threading import sys import pickle import base64 import os from", "= qwe[0].index(r) try: recv_len = data_recv_len.pop(INDEX)[1] data = r.recv(recv_len) try:", "self.device_id = device_id if universalAesKey is not None: self.UNIVERSAL_AES_KEY =", "= [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"] : b64_aes_key_pack } ] )", "= file) r_yml[node] r_yml.pop(node) self._write_yml(file = file, dict_data = r_yml,", "varifierList.pop(INDEX) if len(send_c_msg) > 0: if username in send_c_msg[0]: INDEX", "raise TypeError( \"convert_to_class() missing 1 required positional argument: 'secure_lst'\") else:", ": {__varified_devices}\") if data[0] not in __varified_devices: _recv_ = 
DSP()._convert_to_class(_data_,", "pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass", "get is not None: get[resolved_data[\"username\"]] = b64_aes_key_pack self._add_node( file =", "None, port : int = None, listeners : int =", "r.close() del self.MESSAGE_QUEUES[r] continue except Exception as e: pass if", "= list(\"0123456789\") if onlyText: _all_ = lower_case + upper_case else:", ": str = None, DSP_type : str = None, device_id", "= bytes(self._name_generator(),\"utf-8\") qw = { \"aes_key\" : aes_key, \"nonce\" :", "resolved_data['target_name'], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type in", ")._messanger( MSG = prepare_data ) dsp_data = [resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"])", "self.OUTPUTS: self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close() del", "= False, MTCL : bool = True, MPCL : bool", "except KeyError: return None except TypeError: pass def _remove_node(self,file,node): try:", "not None: self.UNIVERSAL_AES_KEY = universalAesKey else: self.UNIVERSAL_AES_KEY = b'<KEY>' if", "qwe[0].index(r) try: recv_len = data_recv_len.pop(INDEX)[1] data = r.recv(recv_len) try: data", "= None): \"\"\"async_server initializer class that will create the a", ": aes_key, \"nonce\" : nonce, \"aad\" : aad, } pickle_qw", "bool = True, MPCL : bool = False, safeMode :", "self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ] ) else: self._add_node(", ": bool = True, secure_dict : list = None): try:", "self.READABLE = [] self.WRITABLE = [] self.INPUTS = [] self.OUTPUTS", "pass def _remove_node(self,file,node): try: r_yml = self._read_yml(file = file) r_yml[node]", "nonce else: self.NONCE = b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad is not None:", "def 
__encrypt(self,data): aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,) ct = aesgcm.encrypt( self.NONCE, data,", "aad= key_pack[\"aad\"] )._messanger( MSG = base64.b64encode(pickle.dumps(data)) ) self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name,", "raise TypeError(\"When 'mutliple' is to True then channel_name should be", "None, listeners : int = None): self.address = address self.port", "cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.backends import", "= self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"] : b64_aes_key_pack", "self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel : {channel} already exists.\") else: raise TypeError(\"When", "= \"DSP_MSG\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad =", "pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_handshake_request\", universalAesKey = aes_key_pack[\"aes_key\"],", "__server(self): data_recv_len = [] while True: readable, writable, exceptions =", "0: if username in vari_lst[0]: INDEX = vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(", "not in ini[0]: try: data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError: print(\"Client", "self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND ) ) thread3 = threading.Thread( target = self.__callback_loop,", "pass except TypeError: pass else: r_yml = self._read_yml(file) try: value", "return ct def _convert_to_class(self,OBJECT : bytes = None,secure : bool", "in Writable: if s._closed == True and s.fileno() == -1:", "not None: found = False index = None if channel", "ini = list(zip(*data_recv_len)) if len(ini) == 0 or r not", "class that will create the a asyncronouse tcp server. 
\"\"\"", "secure : bool = True -> this should set to", "if channel_name in self.__CUSTOM_CHANNEL: key_pack = self.__CLIENT_KEYS[target_name] key_pack = pickle.loads(base64.b64decode(key_pack))", "secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"]) ct = pickle.loads(ct) return eval(ct) else: aesgcm", "except: pass elif _recv_.DSP_type == \"DSP_REQ_RES\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))", "self.__CLIENT_KEYS[username] except KeyError: continue aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP(", "self.OUTPUTS.remove(e) e.close() del self.MESSAGE_QUEUES[e] def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs,", "[ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ] ) else: self._add_node( file = self.__file_location,", "= [] self.MESSAGE_LIST = [] self.__VARIFIED_DEVICES = [] self.__CLIENT_KEYS =", "self.receive_func, args = ( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST,", "# handling the outputs for w in writable: if w", "open(file, mode) as file: yaml.dump(dict_data, file) def _add_node(self,file = None,", "return documents def _write_yml(self,file = None, dict_data = None,mode =", "= { \"aes_key\" : aes_key, \"nonce\" : nonce, \"aad\" :", "__receiving_msg.pop(INDEX) else: aes_key_pack = __client_keys[data[0]] _recv_ = DSP()._convert_to_class( OBJECT =", "value True, file : str = None -> here user", "channel not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel : {channel} already", "__varifier_lst, __client_keys, __outputs, __request_lst, __request_res_lst, __message_lst, __custom_c_m_r): # __receiving_msg =", "= [] self.__CUSTOM_CHANNEL_MSG_SEND = [] self.__VARIFIER_LIST = [] self.__CALLBACK_LOOP =", "self.WRITABLE: self.WRITABLE.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue else: qwe = list(zip(*data_recv_len))", "True: if 
secure_dict is None: raise TypeError( \"convert_to_class() missing 1", "class DSP(): def __init__( self, msg : str = None,", "eval(ct) else: aesgcm = AESGCM(self.UNIVERSAL_AES_KEY) ct = aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD)", "except: print(\"Error in decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r not in", "to True then channel_name should be a list of multiple", ") else: self._add_node( file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(),", "0: if username in send_c_msg[0]: INDEX = send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX])", "__custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except: pass except: pass def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True:", "self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel : str = None,function : object =", "else: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args = list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else:", "is not None: self.NONCE = nonce else: self.NONCE = b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2'", "req_res_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP(", "upper_case else: _all_ = lower_case + upper_case + special +", "self.address = address self.port = port self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "= None, wait = True): if key == None: return", "self.MESSAGE_QUEUES = {} self.REQUEST_LIST = [] self.REQUEST_RESPONSE_LIST = [] self.MESSAGE_LIST", "channel_name in self.__CUSTOM_CHANNEL: key_pack = self.__CLIENT_KEYS[target_name] key_pack = pickle.loads(base64.b64decode(key_pack)) dsp_data", "vari_lst[1][INDEX] ) varifierList.pop(INDEX) if 
len(send_c_msg) > 0: if username in", "message_q[s].put(username) msg_lst = list(list(zip(*message_list))) req_lst = list(list(zip(*requestList))) req_res_lst = list(list(zip(*requestResList)))", "\"nonce\" : nonce, \"aad\" : aad, } pickle_qw = pickle.dumps(qw)", "1 ) self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER IS ACTIVATED | LISTENING]\")", "Writable: if s._closed == True and s.fileno() == -1: Writable.remove(s)", "ret def __repr__(self): return \"_main.DSP._\" def __encrypt(self,data): aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,)", "aesgcm.decrypt( secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"]) ct = pickle.loads(ct) return eval(ct) else:", "hashes.SHA256()), algorithm = hashes.SHA256(), label = None ) ) ciphertext", "r not in ini[0]: try: data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError:", "elif _recv_.DSP_type in self.__CUSTOM_CHANNEL: try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_)", "print(\"[SERVER IS ACTIVATED | LISTENING]\") self.INPUTS.append(self.sock) thread1 = threading.Thread( target", "False): lower_case = list(\"abcdefghijklmnopqrstuvwxyz\") upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special = list(\"!@#$%&*?\")", "not None: self.msg = MSG data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data =", "DSP_type = \"DSP_handshake_request\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad", "try: try: username = message_q[s].get_nowait() message_q[s].put(username) msg_lst = list(list(zip(*message_list))) req_lst", "pass def CREATE_CHANNEL(self,channel_name = None, multiple : bool = False):", "DSP( DSP_type = \"DSP_MSG\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"],", "select import socket import queue import threading import sys import", "special + 
number random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_)) class DSP(): def __init__(", "in self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue except Exception", "mode = \"w\") except KeyError: return False except: pass def", "\"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestResList.pop(INDEX) if len(vari_lst) > 0:", "# handling the errors for e in exceptions: self.INPUTS.remove(e) if", "LISTEN(self,channel : str = None,function : object = None,args =", "hashes from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.backends import default_backend import", "self.__CUSTOM_CHANNEL_MSG_REC while True: try: for INDEX,_data_ in enumerate(__receiving_msg): data =", "dict_data = None,mode = \"a+\"): with open(file, mode) as file:", "secure self.__file_location = file self.READABLE = [] self.WRITABLE = []", "ret = base64.b64encode(p_e_d) return ret def __repr__(self): return \"_main.DSP._\" def", "self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND ) ) thread3 = threading.Thread( target =", "None: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args", "if s._closed == True and s.fileno() == -1: Writable.remove(s) #", "= pickle.loads(OBJECT) if secure == True: if secure_dict is None:", "self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address : str = None, port : int", "b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad is not None: self.AAD = aad else:", "-1: Writable.remove(s) # try: try: username = message_q[s].get_nowait() message_q[s].put(username) msg_lst", "self.send_func, args = ( self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST,", "= msg else: self.msg = msg self.DSP_type = 
DSP_type self.device_id", "None, node = None): try: read = self._read_yml(file) if read", "aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) +", "= True, MPCL : bool = False, safeMode : bool", ") self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER IS ACTIVATED | LISTENING]\") self.INPUTS.append(self.sock)", "self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel : {channel} already exists.\") else: raise", "nonce = os.urandom(32) aad = bytes(self._name_generator(),\"utf-8\") qw = { \"aes_key\"", "{} self.__CUSTOM_CHANNEL = [] self.__CUSTOM_CHANNEL_MSG_REC = [] self.__CUSTOM_CHANNEL_MSG_SEND = []", "else: raise TypeError(\"When 'mutliple' is to True then channel_name should", "the default value True, file : str = None ->", "should be a list of multiple channel names\") else: if", "Exception as e: pass if data_len: if type(data_len) == type([]):", "None except TypeError: pass def _remove_node(self,file,node): try: r_yml = self._read_yml(file", "self.MESSAGE_QUEUES[connection] = queue.Queue() else: ini = list(zip(*data_recv_len)) if len(ini) ==", "Exception as e: print(\"User Disconnected\") readable.remove(r) self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r) if", "except OSError: pass if len(req_lst) > 0: if username in", "while True: # print(f\"Writable : {Writable}\") # time.sleep(2) for s", ") self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name, dsp_data ] ) class server(): def", "True and s.fileno() == -1: Writable.remove(s) # try: try: username", "aad = bytes(self._name_generator(),\"utf-8\") qw = { \"aes_key\" : aes_key, \"nonce\"", "not None: self.NONCE = nonce else: self.NONCE = b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if", "aad, } pickle_qw = pickle.dumps(qw) b64_aes_key_pack = base64.b64encode(pickle_qw) key =", "pass if len(req_lst) > 0: if username in req_lst[0]: 
INDEX", ": bool = False, MTCL : bool = True, MPCL", "self.__CLIENT_KEYS = {} self.__CUSTOM_CHANNEL = [] self.__CUSTOM_CHANNEL_MSG_REC = [] self.__CUSTOM_CHANNEL_MSG_SEND", "hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ] ) else: self._add_node( file = self.__file_location, node", "# handling the inputs for r in readable: if r", "self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND ) ) thread3 = threading.Thread( target", "bytes = None, nonce : bytes = None, aad :", "else: print(\"User Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r) if", "padding from cryptography.hazmat.backends import default_backend import hashlib import yaml import", "to the default value True, file : str = None", "= r_yml, mode = \"w\") def _get_node(self,file = None, key", "except KeyError: pass except TypeError: pass else: r_yml = self._read_yml(file)", "'file'\") __parent = MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER = __parent.SERVER self.CREATE_CHANNEL = __parent.CREATE_CHANNEL", "value except KeyError: return None except TypeError: pass def _remove_node(self,file,node):", "continue aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_handshake_request\",", "self.MESSAGE_LIST = [] self.__VARIFIED_DEVICES = [] self.__CLIENT_KEYS = {} self.__CUSTOM_CHANNEL", "self.__CALLBACK_LOOP, ) ) # thread1.daemon = True thread1.start() # thread2.daemon", "raise KeyError except KeyError: node_dict = { node[0] : node[1]", "None): self.address = address self.port = port self.sock = socket.socket(socket.AF_INET,", "_name_generator(self,_len_ = 16, onlyText = False): lower_case = list(\"abcdefghijklmnopqrstuvwxyz\") upper_case", "= AESGCM(self.UNIVERSAL_AES_KEY) ct = aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD) ct = pickle.loads(ct)", "_recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_REQ_RES\":", "node[1] self._write_yml(file = 
file, dict_data = r_yml, mode = \"w\")", "for channel in channel_name: if channel not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel)", "self.__CALLBACK_LOOP.append([function,args]) else: raise TypeError(\"'channel' should not be None\") def __callback_loop(self,__callback_loop):", "default_backend import hashlib import yaml import random import time class", "if _recv_.DSP_type == \"DSP_REQ\": try: resolved_data = eval(_recv_.msg) resolved_data =", "This class allows user to create multi-client server. args: secure", "__outputs, __request_lst, __request_res_lst, __message_lst, __custom_c_m_r): # __receiving_msg = self.__RECEIVING_MSG, #", "r_yml.pop(node) self._write_yml(file = file, dict_data = r_yml, mode = \"w\")", "self.OUTPUTS.remove(r) self.INPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) r.close() del self.MESSAGE_QUEUES[r]", "self.MESSAGE_QUEUES[e] def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs, __request_lst, __request_res_lst, __message_lst,", "index = i break if found: if args is None:", "def _get_node(self,file = None, key = None, wait = True):", "key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait = False ) if get is", "r in readable: if r is self.sock: connection,addr = r.accept()", "True: try: for INDEX,_data_ in enumerate(__receiving_msg): data = pickle.loads(base64.b64decode(_data_)) #", "{ resolved_data[\"username\"] : b64_aes_key_pack } ] ) __receiving_msg.pop(INDEX) else: aes_key_pack", "enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1]) def SEND(self,channel_name,target_name,data): if channel_name in self.__CUSTOM_CHANNEL: key_pack", "self._write_yml(file = file, dict_data = r_yml, mode = \"w\") except", "dsp_data ) requestList.pop(INDEX) if len(req_res_lst) > 0: if username in", "args = ( self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, 
self.__CUSTOM_CHANNEL_MSG_SEND", "= [self.device_id, self.__encrypt(pickled_data)] p_e_d = pickle.dumps(encrypted_data) ret = base64.b64encode(p_e_d) return", "found = True index = i break if found: if", "# thread1.daemon = True thread1.start() # thread2.daemon = True thread2.start()", "mgf = padding.MGF1(algorithm = hashes.SHA256()), algorithm = hashes.SHA256(), label =", "pickle.loads(ct) return eval(ct) else: aesgcm = AESGCM(self.UNIVERSAL_AES_KEY) ct = aesgcm.decrypt(self.NONCE,", "target = self.send_func, args = ( self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST,", "> 0: if username in vari_lst[0]: INDEX = vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\"))", "self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r not in self.OUTPUTS: self.OUTPUTS.append(r) except Exception as", "hashes.SHA256(), label = None ) ) ciphertext = base64.b64encode(ciphertext) prepare_data", "( self.__CALLBACK_LOOP, ) ) # thread1.daemon = True thread1.start() #", "AESGCM from cryptography.hazmat.primitives.serialization import load_ssh_public_key from cryptography.hazmat.primitives import hashes from", "= pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [ resolved_data['target_name'], _recv_.msg ] ) __receiving_msg.remove(_data_) except:", "readable: if r is self.sock: connection,addr = r.accept() connection.setblocking(0) self.INPUTS.append(connection)", "self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND ) ) thread3", "thread = threading.Thread(target = self.__server) # thread.daemon = True thread.start()", "self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, ) )", "data_len = 
pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError: print(\"Client Disconnected\") if r in", "queue.Queue() else: ini = list(zip(*data_recv_len)) if len(ini) == 0 or", "# __custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC while True: try: for INDEX,_data_ in", "else: self.NONCE = b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad is not None: self.AAD", "\"\"\"async_server initializer class that will create the a asyncronouse tcp", "except TypeError: pass else: r_yml = self._read_yml(file) try: value =", ")._messanger( MSG = base64.b64encode(pickle.dumps(data)) ) self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name, dsp_data ]", "return value except KeyError: pass except TypeError: pass else: r_yml", "for index,func in enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1]) def SEND(self,channel_name,target_name,data): if channel_name", "= data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error in decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r", "return \"\".join(random.sample(_all_,_len_)) class DSP(): def __init__( self, msg : str", "pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_MSG\", universalAesKey = aes_key_pack[\"aes_key\"],", "= self._read_yml(file) if read != None: read[node[0]] self._change_node_value(file,node) else: raise", "print(\"Error in decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r not in self.OUTPUTS:", "= None, port : int = None, listeners : int", "print(f\"Channel : {channel} already exists.\") else: raise TypeError(\"When 'mutliple' is", "import AESGCM from cryptography.hazmat.primitives.serialization import load_ssh_public_key from cryptography.hazmat.primitives import hashes", "required positional argument: 'file'\") __parent = MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER = 
__parent.SERVER", ": bytes = None,secure : bool = True, secure_dict :", "customChannelMessageSend.pop(INDEX) # except: # pass def CREATE_CHANNEL(self,channel_name = None, multiple", "= [] get = self._get_node(file = self.__file_location,key = hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(),", "if data_len: if type(data_len) == type([]): data_recv_len.append( [ r, data_len[0]", "= base64.b64encode(p_e_d) return ret def __repr__(self): return \"_main.DSP._\" def __encrypt(self,data):", "s in Writable: if s._closed == True and s.fileno() ==", "not None: self.msg = msg else: self.msg = msg self.DSP_type", "__request_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif", "be a list of multiple channel names\") else: if channel_name", "is not None: self.msg = msg else: self.msg = msg", "= device_id if universalAesKey is not None: self.UNIVERSAL_AES_KEY = universalAesKey", "p_e_d = pickle.dumps(encrypted_data) ret = base64.b64encode(p_e_d) return ret def __repr__(self):", "else: aes_key_pack = __client_keys[data[0]] _recv_ = DSP()._convert_to_class( OBJECT = _data_,", "enumerate(__receiving_msg): data = pickle.loads(base64.b64decode(_data_)) # print(f\"data[0] : {data[0]}\") # print(f\"__varified_devices", "= b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG = None): if MSG is not", "will raise an TypeError \"\"\" if not file: raise TypeError(\"asyncServer()", "+ number random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_)) class DSP(): def __init__( self,", "[] self.__VARIFIER_LIST = [] self.__CALLBACK_LOOP = [] self.__RECEIVING_MSG = []", "pass elif _recv_.DSP_type == \"DSP_MSG\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append(", "= ( self.__CALLBACK_LOOP, ) ) # thread1.daemon = True thread1.start()", "> 0: if username in msg_lst[0]: INDEX = msg_lst[0].index(username) aes_key_pack", "tcp server. 
\"\"\" IPNC.__init__(self) self.__secure = secure self.__file_location = file", "list(zip(*data_recv_len)) INDEX = qwe[0].index(r) try: recv_len = data_recv_len.pop(INDEX)[1] data =", "# thread3.daemon = True thread3.start() thread = threading.Thread(target = self.__server)", "req_lst[0]: INDEX = req_lst[0].index(username) try: aes_key_pack = self.__CLIENT_KEYS[username] except KeyError:", "continue except Exception as e: pass if data_len: if type(data_len)", "threading.Thread( target = self.__callback_loop, args = ( self.__CALLBACK_LOOP, ) )", "= data encrypted_data = [self.device_id, self.__encrypt(pickled_data)] p_e_d = pickle.dumps(encrypted_data) ret", "= self.__VARIFIED_DEVICES, # __varifier_lst = self.__VARIFIER_LIST, # __client_keys = self.__CLIENT_KEYS,", "universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger(", "= self.__VARIFIER_LIST, # __client_keys = self.__CLIENT_KEYS, # __outputs = self.OUTPUTS,", "not None: self.__CLIENT_KEYS = get self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address : str", "= lower_case + upper_case + special + number random.shuffle(_all_) return", "self.AAD ) return ct def _convert_to_class(self,OBJECT : bytes = None,secure", "self.OUTPUTS = [] self.MESSAGE_QUEUES = {} self.REQUEST_LIST = [] self.REQUEST_RESPONSE_LIST", "aad is not None: self.AAD = aad else: self.AAD =", "else: self.msg = msg self.DSP_type = DSP_type self.device_id = device_id", ") ) ciphertext = base64.b64encode(ciphertext) prepare_data = {\"key\" : ciphertext}", "-> this should set to the default value True, file", "None,secure : bool = True, secure_dict : list = None):", "self.__file_location, key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait = False ) if get", "= AESGCM(secure_dict[\"aes_key\"]) ct = aesgcm.decrypt( secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"]) ct =", "continue # handling the outputs for w in writable: if", "in self.WRITABLE: 
self.WRITABLE.append(w) # handling the errors for e in", "hashlib import yaml import random import time class IPNC(): def", "== -1: Writable.remove(s) # try: try: username = message_q[s].get_nowait() message_q[s].put(username)", "= None, node = None): r_yml = self._read_yml(file) r_yml[node[0]] =", "return eval(ct) except TypeError: sys.exit() except ValueError: print(\"sender has not", "pass if len(msg_lst) > 0: if username in msg_lst[0]: INDEX", "handling the outputs for w in writable: if w not", "outputs for w in writable: if w not in self.WRITABLE:", "class MAIN(IPNC): def __init__(self,secure : bool = True,file = None):", "= DSP()._convert_to_class(_data_, secure = False) if _recv_.DSP_type == \"username_secure\": resolved_data", "_recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_MSG\":", "def SERVER(self,address : str = None, port : int =", "in send_c_msg[0]: INDEX = send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) # except:", "self.msg = MSG data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data = pickle.dumps(data) pickled_data", "found: if args is None: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else:", ") if _recv_.DSP_type == \"DSP_REQ\": try: resolved_data = eval(_recv_.msg) resolved_data", "self.INPUTS = [] self.OUTPUTS = [] self.MESSAGE_QUEUES = {} self.REQUEST_LIST", "print(f\"Writable : {Writable}\") # time.sleep(2) for s in Writable: if", "if key == None: return self._read_yml(file) if wait: while True:", "b64_aes_key_pack } ] ) __receiving_msg.pop(INDEX) else: aes_key_pack = __client_keys[data[0]] _recv_", "( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC,", "= r_yml, mode = \"w\") 
except KeyError: return False except:", "= list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special = list(\"!@#$%&*?\") number = list(\"0123456789\") if onlyText:", "# thread.daemon = True thread.start() def __server(self): data_recv_len = []", "= list(zip(*data_recv_len)) INDEX = qwe[0].index(r) try: recv_len = data_recv_len.pop(INDEX)[1] data", "AESGCM(secure_dict[\"aes_key\"]) ct = aesgcm.decrypt( secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"]) ct = pickle.loads(ct)", "None): if MSG is not None: self.msg = MSG data", ") varifierList.pop(INDEX) if len(send_c_msg) > 0: if username in send_c_msg[0]:", "TypeError(\"asyncServer() missing 1 required positional argument: 'file'\") __parent = MAIN(file,debug,MTCL,MPCL,safeMode)", "pickle.dumps(data) pickled_data = data encrypted_data = [self.device_id, self.__encrypt(pickled_data)] p_e_d =", ": node[1] } self._write_yml(file, node_dict) def _change_node_value(self,file = None, node", "message_list.pop(INDEX) except OSError: pass if len(req_lst) > 0: if username", "as e: print(\"User Disconnected\") readable.remove(r) self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r) if r", "= True): if key == None: return self._read_yml(file) if wait:", "[] self.__RECEIVING_MSG = [] get = self._get_node(file = self.__file_location,key =", "server. 
args: secure : bool = True -> this should", "missing 1 required positional argument: 'file'\") __parent = MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER", "INDEX = msg_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data", "__message_lst.append( [ resolved_data['target_name'], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif", "from cryptography.hazmat.primitives.serialization import load_ssh_public_key from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric", "resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_)", "= None, key = None, wait = True): if key", "this should set to the default value True, file :", "import yaml import random import time class IPNC(): def __init__(self):", "dsp_data = [resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] = b64_aes_key_pack get =", "= base64.b64encode(pickle_qw) key = load_ssh_public_key( bytes( resolved_data[\"data\"], \"utf-8\" ), backend=default_backend()", "is not None: found = False index = None if", "__receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type in self.__CUSTOM_CHANNEL: try: resolved_data =", "self.WRITABLE = [] self.INPUTS = [] self.OUTPUTS = [] self.MESSAGE_QUEUES", "e in self.OUTPUTS: self.OUTPUTS.remove(e) e.close() del self.MESSAGE_QUEUES[e] def receive_func(self, __receiving_msg,__varified_devices,", "if channel not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel : {channel}", "__parent = MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER = __parent.SERVER self.CREATE_CHANNEL = __parent.CREATE_CHANNEL self.LISTEN", "label = None ) ) ciphertext = base64.b64encode(ciphertext) prepare_data =", 
"func[0](*func[1]) def SEND(self,channel_name,target_name,data): if channel_name in self.__CUSTOM_CHANNEL: key_pack = self.__CLIENT_KEYS[target_name]", "self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, ) ) thread2 =", "if get is not None: self.__CLIENT_KEYS = get self.__VARIFIED_DEVICES.extend(list(get.keys())) def", "dict_data = r_yml, mode = \"w\") def _get_node(self,file = None,", "self._read_yml(file) try: value = r_yml[key] return value except KeyError: pass", "self._read_yml(file) if read != None: read[node[0]] self._change_node_value(file,node) else: raise KeyError", "be None\") def __callback_loop(self,__callback_loop): while True: for index,func in enumerate(__callback_loop):", "True: r_yml = self._read_yml(file) try: value = r_yml[key] return value", "# time.sleep(2) for s in Writable: if s._closed == True", "r.recv(recv_len) try: data = data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error in decoding\") self.__RECEIVING_MSG.append(data)", "data_len: if type(data_len) == type([]): data_recv_len.append( [ r, data_len[0] ]", "errors for e in exceptions: self.INPUTS.remove(e) if e in self.OUTPUTS:", ": bytes = None, nonce : bytes = None, aad", "IPNC(): def __init__(self): pass def _read_yml(self,file = None): with open(file)", "return None except TypeError: pass def _remove_node(self,file,node): try: r_yml =", "), backend=default_backend() ) ciphertext = key.encrypt( b64_aes_key_pack, padding.OAEP( mgf =", "ValueError: print(\"sender has not done the handshake\") class MAIN(IPNC): def", "try: value = r_yml[key] return value except KeyError: return None", "DSP()._convert_to_class( OBJECT = _data_, secure = True, secure_dict = aes_key_pack", "print(\"User Disconnected\") readable.remove(r) self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r) if r in self.WRITABLE:", "0: if username in req_res_lst[0]: INDEX = req_res_lst[0].index(username) aes_key_pack =", 
"= \"a+\"): with open(file, mode) as file: yaml.dump(dict_data, file) def", "nonce = key_pack[\"nonce\"], aad= key_pack[\"aad\"] )._messanger( MSG = base64.b64encode(pickle.dumps(data)) )", "handshake\") class MAIN(IPNC): def __init__(self,secure : bool = True,file =", "INDEX = send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) # except: # pass", "[] self.MESSAGE_LIST = [] self.__VARIFIED_DEVICES = [] self.__CLIENT_KEYS = {}", "= key.encrypt( b64_aes_key_pack, padding.OAEP( mgf = padding.MGF1(algorithm = hashes.SHA256()), algorithm", "already exists.\") else: raise TypeError(\"When 'mutliple' is to True then", "self._read_yml(file = file) r_yml[node] r_yml.pop(node) self._write_yml(file = file, dict_data =", "= r_yml[key] return value except KeyError: pass except TypeError: pass", "decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r not in self.OUTPUTS: self.OUTPUTS.append(r) except", ": str = None, device_id : int = None, universalAesKey", "TypeError(\"'channel' should not be None\") def __callback_loop(self,__callback_loop): while True: for", "None, aad : str = None, ): if msg is", "len(send_c_msg) > 0: if username in send_c_msg[0]: INDEX = send_c_msg[0].index(username)", "connection,addr = r.accept() connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] = queue.Queue() else: ini", "\"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) message_list.pop(INDEX) except OSError: pass", "nonce, \"aad\" : aad, } pickle_qw = pickle.dumps(qw) b64_aes_key_pack =", "= data_recv_len.pop(INDEX)[1] data = r.recv(recv_len) try: data = data.decode().strip(\"0\").encode(\"utf-8\") except:", "file, dict_data = r_yml, mode = \"w\") except KeyError: return", "def _name_generator(self,_len_ = 16, onlyText = 
False): lower_case = list(\"abcdefghijklmnopqrstuvwxyz\")", "and configurations. if not specified, will raise an TypeError \"\"\"", "= pickle.dumps(data) pickled_data = data encrypted_data = [self.device_id, self.__encrypt(pickled_data)] p_e_d", "} self._write_yml(file, node_dict) def _change_node_value(self,file = None, node = None):", "self.REQUEST_LIST = [] self.REQUEST_RESPONSE_LIST = [] self.MESSAGE_LIST = [] self.__VARIFIED_DEVICES", "self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] = queue.Queue() else: ini = list(zip(*data_recv_len)) if len(ini)", "= pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except:", "e.close() del self.MESSAGE_QUEUES[e] def receive_func(self, __receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs, __request_lst,", "key == None: return self._read_yml(file) if wait: while True: r_yml", "self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args = list(args) args.insert(0,p_data)", "# print(f\"__varified_devices : {__varified_devices}\") if data[0] not in __varified_devices: _recv_", "len(vari_lst) > 0: if username in vari_lst[0]: INDEX = vari_lst[0].index(username)", "= list(list(zip(*message_list))) req_lst = list(list(zip(*requestList))) req_res_lst = list(list(zip(*requestResList))) vari_lst =", "[] self.__CALLBACK_LOOP = [] self.__RECEIVING_MSG = [] get = self._get_node(file", "try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [ resolved_data['target_name'], _recv_.msg ] )", "str = None,function : object = None,args = None): if", "= base64.b64encode(ciphertext) prepare_data = {\"key\" : ciphertext} dsp_data = DSP(", "= None): if MSG is not None: self.msg = MSG", "class server(): def __init__(self, file = None, debug : bool", "threading.Thread( target = 
self.receive_func, args = ( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST,", "base64.b64encode(pickle.dumps(data)) ) self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name, dsp_data ] ) class server():", "= pickle.loads(base64.b64decode(secure_dict)) aesgcm = AESGCM(secure_dict[\"aes_key\"]) ct = aesgcm.decrypt( secure_dict[\"nonce\"], OBJECT[-1],", "= None, ): if msg is not None: self.msg =", "e: pass if data_len: if type(data_len) == type([]): data_recv_len.append( [", "self._write_yml(file, node_dict) def _change_node_value(self,file = None, node = None): r_yml", "is not None: self.UNIVERSAL_AES_KEY = universalAesKey else: self.UNIVERSAL_AES_KEY = b'<KEY>'", "# __request_lst = self.REQUEST_LIST # __request_res_lst = self.REQUEST_RESPONSE_LIST # __message_lst", "KeyError: node_dict = { node[0] : node[1] } self._write_yml(file, node_dict)", "self.AAD = aad else: self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG =", "return value except KeyError: return None except TypeError: pass def", "+ special + number random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_)) class DSP(): def", "= f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data", "= self.__server) # thread.daemon = True thread.start() def __server(self): data_recv_len", "DSP( DSP_type = \"DSP_handshake_request_res\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"],", "= DSP( DSP_type = \"DSP_MSG\", universalAesKey = aes_key_pack[\"aes_key\"], nonce =", "True, secure_dict = aes_key_pack ) if _recv_.DSP_type == \"DSP_REQ\": try:", "\"DSP_REQ\": try: resolved_data = eval(_recv_.msg) resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [", "if r not in self.OUTPUTS: self.OUTPUTS.append(r) except Exception as e:", "if len(msg_lst) > 0: if username in msg_lst[0]: 
INDEX =", "100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestList.pop(INDEX) if len(req_res_lst) >", "f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data )", ": {data[0]}\") # print(f\"__varified_devices : {__varified_devices}\") if data[0] not in", "self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r] continue #", "value except KeyError: pass except TypeError: pass else: r_yml =", "type(channel_name) == type([]): for channel in channel_name: if channel not", "value = r_yml[key] return value except KeyError: return None except", "aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD) ct = pickle.loads(ct) return eval(ct) except TypeError:", "def _messanger(self,MSG = None): if MSG is not None: self.msg", "def __init__(self,secure : bool = True,file = None): \"\"\"async_server initializer", "pass elif _recv_.DSP_type == \"DSP_REQ_RES\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append(", "universalAesKey=key_pack[\"aes_key\"], nonce = key_pack[\"nonce\"], aad= key_pack[\"aad\"] )._messanger( MSG = base64.b64encode(pickle.dumps(data))", "None: read[node[0]] self._change_node_value(file,node) else: raise KeyError except KeyError: node_dict =", "resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type ==", "= b'\\xfe\\x1e1\\xc0\\xfc`s\\xbc6\\x9fQ\\xb2' if aad is not None: self.AAD = aad", "None: self.UNIVERSAL_AES_KEY = universalAesKey else: self.UNIVERSAL_AES_KEY = b'<KEY>' if nonce", "self.AAD) ct = pickle.loads(ct) return eval(ct) except TypeError: sys.exit() except", "__init__(self): pass def _read_yml(self,file = None): with open(file) as file:", "file) r_yml[node] r_yml.pop(node) self._write_yml(file = file, dict_data = r_yml, 
mode", "= self.OUTPUTS, # __request_lst = self.REQUEST_LIST # __request_res_lst = self.REQUEST_RESPONSE_LIST", "index,func in enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1]) def SEND(self,channel_name,target_name,data): if channel_name in", ")._messanger( MSG = f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send(", "is None: raise TypeError( \"convert_to_class() missing 1 required positional argument:", "= select.select(self.INPUTS, self.OUTPUTS, self.INPUTS) # handling the inputs for r", "special = list(\"!@#$%&*?\") number = list(\"0123456789\") if onlyText: _all_ =", "self.INPUTS.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue except Exception as e: pass", "if nonce is not None: self.NONCE = nonce else: self.NONCE", "True thread.start() def __server(self): data_recv_len = [] while True: readable,", "thread3 = threading.Thread( target = self.__callback_loop, args = ( self.__CALLBACK_LOOP,", "get[resolved_data[\"username\"]] = b64_aes_key_pack self._add_node( file = self.__file_location, node = [", "while True: readable, writable, exceptions = select.select(self.INPUTS, self.OUTPUTS, self.INPUTS) #", "= [resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] = b64_aes_key_pack get = self._get_node(", "= ( self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND )", "args: secure : bool = True -> this should set", "self.MESSAGE_QUEUES[r] continue else: qwe = list(zip(*data_recv_len)) INDEX = qwe[0].index(r) try:", "= base64.b64decode(OBJECT) OBJECT = pickle.loads(OBJECT) if secure == True: if", "aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_handshake_request\", 
universalAesKey", "return eval(ct) else: aesgcm = AESGCM(self.UNIVERSAL_AES_KEY) ct = aesgcm.decrypt(self.NONCE, OBJECT[-1],", "self.__secure = secure self.__file_location = file self.READABLE = [] self.WRITABLE", "username in req_lst[0]: INDEX = req_lst[0].index(username) try: aes_key_pack = self.__CLIENT_KEYS[username]", "= list(\"abcdefghijklmnopqrstuvwxyz\") upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special = list(\"!@#$%&*?\") number =", "except KeyError: node_dict = { node[0] : node[1] } self._write_yml(file,", "that will create the a asyncronouse tcp server. \"\"\" IPNC.__init__(self)", "in writable: if w not in self.WRITABLE: self.WRITABLE.append(w) # handling", "self.__CUSTOM_CHANNEL: key_pack = self.__CLIENT_KEYS[target_name] key_pack = pickle.loads(base64.b64decode(key_pack)) dsp_data = DSP(", "which saves all the keys and configurations. if not specified,", "int = None): self.address = address self.port = port self.sock", "if secure == True: if secure_dict is None: raise TypeError(", "Disconnected\") readable.remove(r) self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r)", "Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r) if r in", "else: secure_dict = pickle.loads(base64.b64decode(secure_dict)) aesgcm = AESGCM(secure_dict[\"aes_key\"]) ct = aesgcm.decrypt(", "random import time class IPNC(): def __init__(self): pass def _read_yml(self,file", "= pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_handshake_request_res\", universalAesKey =", "if MSG is not None: self.msg = MSG data =", "False ) if get is not None: get[resolved_data[\"username\"]] = b64_aes_key_pack", "wait: while True: r_yml = self._read_yml(file) try: value = r_yml[key]", "argument: 'file'\") __parent = MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER = __parent.SERVER self.CREATE_CHANNEL =", "None): if channel is not None: found = False index", "aes_key = 
AESGCM.generate_key(256) nonce = os.urandom(32) aad = bytes(self._name_generator(),\"utf-8\") qw", "__receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_REQ_RES\": try: resolved_data =", "upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special = list(\"!@#$%&*?\") number = list(\"0123456789\") if", "nonce : bytes = None, aad : str = None,", "self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close() del self.MESSAGE_QUEUES[r]", "\"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestList.pop(INDEX) if len(req_res_lst) > 0:", "msg self.DSP_type = DSP_type self.device_id = device_id if universalAesKey is", "if username in send_c_msg[0]: INDEX = send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX)", "socket.SO_REUSEADDR, 1 ) self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER IS ACTIVATED |", "except TypeError: pass def _remove_node(self,file,node): try: r_yml = self._read_yml(file =", "if e in self.OUTPUTS: self.OUTPUTS.remove(e) e.close() del self.MESSAGE_QUEUES[e] def receive_func(self,", "__receiving_msg = self.__RECEIVING_MSG, # __varified_devices = self.__VARIFIED_DEVICES, # __varifier_lst =", "not None: self.AAD = aad else: self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def", "get self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address : str = None, port :", "int = None, listeners : int = None): self.address =", "if r in self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r) if r in self.WRITABLE:", ": str = None -> here user need to pass", "= self.__callback_loop, args = ( self.__CALLBACK_LOOP, ) ) # thread1.daemon", "self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue except 
Exception as", "recv_len = data_recv_len.pop(INDEX)[1] data = r.recv(recv_len) try: data = data.decode().strip(\"0\").encode(\"utf-8\")", ") __receiving_msg.pop(INDEX) else: aes_key_pack = __client_keys[data[0]] _recv_ = DSP()._convert_to_class( OBJECT", "# __request_res_lst = self.REQUEST_RESPONSE_LIST # __message_lst = self.MESSAGE_LIS # __custom_c_m_r", "else: if channel_name not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel :", "OBJECT = pickle.loads(OBJECT) if secure == True: if secure_dict is", ") return ct def _convert_to_class(self,OBJECT : bytes = None,secure :", "requestResList.pop(INDEX) if len(vari_lst) > 0: if username in vari_lst[0]: INDEX", "prepare_data = {\"key\" : ciphertext} dsp_data = DSP( DSP_type=\"username_secure_response\" )._messanger(", "connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] = queue.Queue() else: ini = list(zip(*data_recv_len)) if", "MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER = __parent.SERVER self.CREATE_CHANNEL = __parent.CREATE_CHANNEL self.LISTEN = __parent.LISTEN", "s.send( vari_lst[1][INDEX] ) varifierList.pop(INDEX) if len(send_c_msg) > 0: if username", "user to create multi-client server. 
args: secure : bool =", "else: raise TypeError(\"'channel' should not be None\") def __callback_loop(self,__callback_loop): while", "0: if username in req_lst[0]: INDEX = req_lst[0].index(username) try: aes_key_pack", "self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, ) ) thread2 = threading.Thread(", "= hashes.SHA256(), label = None ) ) ciphertext = base64.b64encode(ciphertext)", "aad : str = None, ): if msg is not", "pickle.loads(base64.b64decode(_data_)) # print(f\"data[0] : {data[0]}\") # print(f\"__varified_devices : {__varified_devices}\") if", "# print(f\"Writable : {Writable}\") # time.sleep(2) for s in Writable:", "self.__RECEIVING_MSG = [] get = self._get_node(file = self.__file_location,key = hashlib.sha256(bytes(\"key\",", "MSG = f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data", "resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [ resolved_data['target_name'], _recv_.msg ] ) __receiving_msg.remove(_data_)", "__client_keys, __outputs, __request_lst, __request_res_lst, __message_lst, __custom_c_m_r): # __receiving_msg = self.__RECEIVING_MSG,", "DSP_type = \"DSP_handshake_request_res\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad", "list(\"0123456789\") if onlyText: _all_ = lower_case + upper_case else: _all_", "allows user to create multi-client server. 
args: secure : bool", "== \"DSP_REQ\": try: resolved_data = eval(_recv_.msg) resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append(", "= port self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1", "in ini[0]: try: data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError: print(\"Client Disconnected\")", "self.WRITABLE: self.WRITABLE.append(w) # handling the errors for e in exceptions:", "= False): if multiple: if type(channel_name) == type([]): for channel", "= f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data = pickle.dumps(data) pickled_data = data encrypted_data =", "TypeError: pass def _remove_node(self,file,node): try: r_yml = self._read_yml(file = file)", "= [] self.__RECEIVING_MSG = [] get = self._get_node(file = self.__file_location,key", "= DSP( DSP_type = \"DSP_handshake_request\", universalAesKey = aes_key_pack[\"aes_key\"], nonce =", "OBJECT = base64.b64decode(OBJECT) OBJECT = pickle.loads(OBJECT) if secure == True:", "= self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ] ) else:", "the errors for e in exceptions: self.INPUTS.remove(e) if e in", "list(list(zip(*varifierList))) send_c_msg = list(zip(*customChannelMessageSend)) except KeyError: pass if len(msg_lst) >", "print(\"Client Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) if r in", "all the keys and configurations. 
if not specified, will raise", "self.INPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue", "= AESGCM.generate_key(256) nonce = os.urandom(32) aad = bytes(self._name_generator(),\"utf-8\") qw =", "else: r_yml = self._read_yml(file) try: value = r_yml[key] return value", "self.WRITABLE: self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r] continue # handling the outputs for", "lower_case + upper_case else: _all_ = lower_case + upper_case +", "= pickle.loads(base64.b64decode(key_pack)) dsp_data = DSP( DSP_type = channel_name, universalAesKey=key_pack[\"aes_key\"], nonce", "else: self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG = None): if MSG", "a asyncronouse tcp server. \"\"\" IPNC.__init__(self) self.__secure = secure self.__file_location", "None: self.msg = MSG data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data = pickle.dumps(data)", "in self.OUTPUTS: self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close()", ") ) # thread1.daemon = True thread1.start() # thread2.daemon =", "handling the inputs for r in readable: if r is", "else: qwe = list(zip(*data_recv_len)) INDEX = qwe[0].index(r) try: recv_len =", "if not specified, will raise an TypeError \"\"\" if not", "self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.MESSAGE_LIST, self.__CUSTOM_CHANNEL_MSG_REC, ) ) thread2", ": ciphertext} dsp_data = DSP( DSP_type=\"username_secure_response\" )._messanger( MSG = prepare_data", "aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG =", "INDEX = qwe[0].index(r) try: recv_len = data_recv_len.pop(INDEX)[1] data = r.recv(recv_len)", "= aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD) ct = pickle.loads(ct) return eval(ct) except", "the handshake\") class MAIN(IPNC): def __init__(self,secure : bool = True,file", "thread2.start() # 
thread3.daemon = True thread3.start() thread = threading.Thread(target =", "ct = aesgcm.decrypt( secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"]) ct = pickle.loads(ct) return", "aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_MSG\", universalAesKey", "[] self.__CUSTOM_CHANNEL_MSG_REC = [] self.__CUSTOM_CHANNEL_MSG_SEND = [] self.__VARIFIER_LIST = []", "] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type in self.__CUSTOM_CHANNEL: try:", "from cryptography.hazmat.backends import default_backend import hashlib import yaml import random", "self._write_yml(file = file, dict_data = r_yml, mode = \"w\") def", "except KeyError: pass if len(msg_lst) > 0: if username in", "self.sock.listen(listeners) print(\"[SERVER IS ACTIVATED | LISTENING]\") self.INPUTS.append(self.sock) thread1 = threading.Thread(", "if multiple: if type(channel_name) == type([]): for channel in channel_name:", "thread1.daemon = True thread1.start() # thread2.daemon = True thread2.start() #", "def __callback_loop(self,__callback_loop): while True: for index,func in enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1])", "self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args = list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else: raise TypeError(\"'channel' should", "self.INPUTS.append(self.sock) thread1 = threading.Thread( target = self.receive_func, args = (", "thread3.daemon = True thread3.start() thread = threading.Thread(target = self.__server) #", "data[0] not in __varified_devices: _recv_ = DSP()._convert_to_class(_data_, secure = False)", "= None, device_id : int = None, universalAesKey : bytes", "try: recv_len = data_recv_len.pop(INDEX)[1] data = r.recv(recv_len) try: data =", "# __receiving_msg = self.__RECEIVING_MSG, # __varified_devices = self.__VARIFIED_DEVICES, # __varifier_lst", "] ) __receiving_msg.remove(_data_) except: pass elif _recv_.DSP_type == \"DSP_REQ_RES\": try:", "file: 
yaml.dump(dict_data, file) def _add_node(self,file = None, node = None):", "1 required positional argument: 'file'\") __parent = MAIN(file,debug,MTCL,MPCL,safeMode) self.SERVER =", "= lower_case + upper_case else: _all_ = lower_case + upper_case", "16, onlyText = False): lower_case = list(\"abcdefghijklmnopqrstuvwxyz\") upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')", "): if msg is not None: self.msg = msg else:", "bytes = None,secure : bool = True, secure_dict : list", "= {} self.__CUSTOM_CHANNEL = [] self.__CUSTOM_CHANNEL_MSG_REC = [] self.__CUSTOM_CHANNEL_MSG_SEND =", "self.__CLIENT_KEYS, # __outputs = self.OUTPUTS, # __request_lst = self.REQUEST_LIST #", "__client_keys[resolved_data[\"username\"]] = b64_aes_key_pack get = self._get_node( file = self.__file_location, key", "[] self.WRITABLE = [] self.INPUTS = [] self.OUTPUTS = []", "ct = aesgcm.encrypt( self.NONCE, data, self.AAD ) return ct def", "[self.device_id, self.__encrypt(pickled_data)] p_e_d = pickle.dumps(encrypted_data) ret = base64.b64encode(p_e_d) return ret", "try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __custom_c_m_r.append(resolved_data) __receiving_msg.remove(_data_) except: pass except: pass", "r.close() del self.MESSAGE_QUEUES[r] continue else: qwe = list(zip(*data_recv_len)) INDEX =", "0: if username in msg_lst[0]: INDEX = msg_lst[0].index(username) aes_key_pack =", "= DSP()._convert_to_class( OBJECT = _data_, secure = True, secure_dict =", "aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type", "get is not None: self.__CLIENT_KEYS = get self.__VARIFIED_DEVICES.extend(list(get.keys())) def SERVER(self,address", "list(list(zip(*message_list))) req_lst = list(list(zip(*requestList))) req_res_lst = list(list(zip(*requestResList))) vari_lst = list(list(zip(*varifierList)))", "None, dict_data = None,mode = \"a+\"): with open(file, mode) as", "KeyError: return None except TypeError: pass def 
_remove_node(self,file,node): try: r_yml", "r is self.sock: connection,addr = r.accept() connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] =", "__varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]] = b64_aes_key_pack get = self._get_node( file = self.__file_location,", "if secure_dict is None: raise TypeError( \"convert_to_class() missing 1 required", "list(\"abcdefghijklmnopqrstuvwxyz\") upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special = list(\"!@#$%&*?\") number = list(\"0123456789\")", "read = self._read_yml(file) if read != None: read[node[0]] self._change_node_value(file,node) else:", "self.REQUEST_RESPONSE_LIST # __message_lst = self.MESSAGE_LIS # __custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC while", "= False): lower_case = list(\"abcdefghijklmnopqrstuvwxyz\") upper_case = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special =", "handling the errors for e in exceptions: self.INPUTS.remove(e) if e", "= threading.Thread( target = self.receive_func, args = ( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES,", "b'<KEY>' if nonce is not None: self.NONCE = nonce else:", "get = self._get_node( file = self.__file_location, key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait", "len(req_lst) > 0: if username in req_lst[0]: INDEX = req_lst[0].index(username)", "cryptography.hazmat.primitives.serialization import load_ssh_public_key from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import", "== channel: found = True index = i break if", "self.__CALLBACK_LOOP = [] self.__RECEIVING_MSG = [] get = self._get_node(file =", "_recv_.DSP_type == \"username_secure\": resolved_data = eval(_recv_.msg) aes_key = AESGCM.generate_key(256) nonce", ") ) thread2 = threading.Thread( target = self.send_func, args =", "base64.b64encode(p_e_d) return ret def __repr__(self): return \"_main.DSP._\" def __encrypt(self,data): aesgcm", "e in exceptions: 
self.INPUTS.remove(e) if e in self.OUTPUTS: self.OUTPUTS.remove(e) e.close()", "AESGCM(self.UNIVERSAL_AES_KEY) ct = aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD) ct = pickle.loads(ct) return", "self.MESSAGE_QUEUES[r] continue except Exception as e: pass if data_len: if", "of multiple channel names\") else: if channel_name not in self.__CUSTOM_CHANNEL:", "default value True, file : str = None -> here", "= True index = i break if found: if args", "e: print(\"User Disconnected\") readable.remove(r) self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r) if r in", ": int = None, listeners : int = None): self.address", "r_yml = self._read_yml(file) r_yml[node[0]] = node[1] self._write_yml(file = file, dict_data", ": bytes = None, aad : str = None, ):", "\"\"\" if not file: raise TypeError(\"asyncServer() missing 1 required positional", "device_id : int = None, universalAesKey : bytes = None,", "= [] self.OUTPUTS = [] self.MESSAGE_QUEUES = {} self.REQUEST_LIST =", "ConnectionResetError: print(\"Client Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) if r", "not be None\") def __callback_loop(self,__callback_loop): while True: for index,func in", "= list(\"!@#$%&*?\") number = list(\"0123456789\") if onlyText: _all_ = lower_case", "[] self.REQUEST_RESPONSE_LIST = [] self.MESSAGE_LIST = [] self.__VARIFIED_DEVICES = []", "_recv_.DSP_type == \"DSP_MSG\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __message_lst.append( [ resolved_data['target_name'],", "if len(send_c_msg) > 0: if username in send_c_msg[0]: INDEX =", "str = None, DSP_type : str = None, device_id :", "\"DSP_MSG\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"]", "pass def _read_yml(self,file = None): with open(file) as file: documents", "dsp_data = DSP( DSP_type=\"username_secure_response\" )._messanger( MSG = prepare_data ) dsp_data", "in channel_name: if channel not in self.__CUSTOM_CHANNEL: 
self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel", "[] self.OUTPUTS = [] self.MESSAGE_QUEUES = {} self.REQUEST_LIST = []", "= DSP( DSP_type=\"username_secure_response\" )._messanger( MSG = prepare_data ) dsp_data =", "= aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG", "here user need to pass a yaml file which saves", "= self.__RECEIVING_MSG, # __varified_devices = self.__VARIFIED_DEVICES, # __varifier_lst = self.__VARIFIER_LIST,", "def __init__(self): pass def _read_yml(self,file = None): with open(file) as", "multiple channel names\") else: if channel_name not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name)", "__client_keys = self.__CLIENT_KEYS, # __outputs = self.OUTPUTS, # __request_lst =", "self._add_node( file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ]", "None, wait = True): if key == None: return self._read_yml(file)", "__custom_c_m_r): # __receiving_msg = self.__RECEIVING_MSG, # __varified_devices = self.__VARIFIED_DEVICES, #", "OBJECT = _data_, secure = True, secure_dict = aes_key_pack )", "MAIN(IPNC): def __init__(self,secure : bool = True,file = None): \"\"\"async_server", "bool = False): if multiple: if type(channel_name) == type([]): for", "= \"w\") except KeyError: return False except: pass def _name_generator(self,_len_", "self.WRITABLE.remove(r) self.INPUTS.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue except Exception as e:", "names\") else: if channel_name not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel", "print(\"sender has not done the handshake\") class MAIN(IPNC): def __init__(self,secure", "= file, dict_data = r_yml, mode = \"w\") except KeyError:", "as e: pass if data_len: if type(data_len) == type([]): data_recv_len.append(", "in msg_lst[0]: INDEX = msg_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack =", "DSP(): def 
__init__( self, msg : str = None, DSP_type", "\"convert_to_class() missing 1 required positional argument: 'secure_lst'\") else: secure_dict =", "self.__CUSTOM_CHANNEL: for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"] == channel: found", "= list(zip(*customChannelMessageSend)) except KeyError: pass if len(msg_lst) > 0: if", "import socket import queue import threading import sys import pickle", "msg else: self.msg = msg self.DSP_type = DSP_type self.device_id =", "len(req_res_lst) > 0: if username in req_res_lst[0]: INDEX = req_res_lst[0].index(username)", "try: for INDEX,_data_ in enumerate(__receiving_msg): data = pickle.loads(base64.b64decode(_data_)) # print(f\"data[0]", "except Exception as e: print(\"User Disconnected\") readable.remove(r) self.INPUTS.remove(r) writable.remove(r) self.OUTPUTS.remove(r)", "= True, secure_dict = aes_key_pack ) if _recv_.DSP_type == \"DSP_REQ\":", "cryptography.hazmat.primitives.ciphers.aead import AESGCM from cryptography.hazmat.primitives.serialization import load_ssh_public_key from cryptography.hazmat.primitives import", "None, multiple : bool = False): if multiple: if type(channel_name)", "should not be None\") def __callback_loop(self,__callback_loop): while True: for index,func", "+ 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestList.pop(INDEX) if len(req_res_lst)", "import sys import pickle import base64 import os from cryptography.hazmat.backends", "s.fileno() == -1: Writable.remove(s) # try: try: username = message_q[s].get_nowait()", "from cryptography.hazmat.primitives.ciphers.aead import AESGCM from cryptography.hazmat.primitives.serialization import load_ssh_public_key from cryptography.hazmat.primitives", "if onlyText: _all_ = lower_case + upper_case else: _all_ =", "return False except: pass def _name_generator(self,_len_ = 16, onlyText =", "list(zip(*customChannelMessageSend)) except KeyError: pass if len(msg_lst) > 
0: if username", "if found: if args is None: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) self.__CALLBACK_LOOP.append([function,[p_data]])", "import random import time class IPNC(): def __init__(self): pass def", "True -> this should set to the default value True,", "key_pack[\"aad\"] )._messanger( MSG = base64.b64encode(pickle.dumps(data)) ) self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name, dsp_data", "= [] self.WRITABLE = [] self.INPUTS = [] self.OUTPUTS =", "thread3.start() thread = threading.Thread(target = self.__server) # thread.daemon = True", "in self.OUTPUTS: self.OUTPUTS.append(r) except Exception as e: print(\"User Disconnected\") readable.remove(r)", "set to the default value True, file : str =", "file which saves all the keys and configurations. if not", "self.__VARIFIED_DEVICES = [] self.__CLIENT_KEYS = {} self.__CUSTOM_CHANNEL = [] self.__CUSTOM_CHANNEL_MSG_REC", "threading.Thread( target = self.send_func, args = ( self.WRITABLE, self.MESSAGE_QUEUES, self.MESSAGE_LIST,", "S import select import socket import queue import threading import", "False index = None if channel in self.__CUSTOM_CHANNEL: for i,d", "= [] self.__CLIENT_KEYS = {} self.__CUSTOM_CHANNEL = [] self.__CUSTOM_CHANNEL_MSG_REC =", "def LISTEN(self,channel : str = None,function : object = None,args", "= [] self.INPUTS = [] self.OUTPUTS = [] self.MESSAGE_QUEUES =", "== \"username_secure\": resolved_data = eval(_recv_.msg) aes_key = AESGCM.generate_key(256) nonce =", "+ 100, \"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) message_list.pop(INDEX) except", "= r.accept() connection.setblocking(0) self.INPUTS.append(connection) self.MESSAGE_QUEUES[connection] = queue.Queue() else: ini =", "channel_name should be a list of multiple channel names\") else:", "= DSP_type self.device_id = device_id if universalAesKey is not None:", "secure_dict = aes_key_pack ) if _recv_.DSP_type == \"DSP_REQ\": try: resolved_data", 
"aes_key_pack[\"aad\"] )._messanger( MSG = f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") try:", "data = r.recv(recv_len) try: data = data.decode().strip(\"0\").encode(\"utf-8\") except: print(\"Error in", "[] self.__CUSTOM_CHANNEL_MSG_SEND = [] self.__VARIFIER_LIST = [] self.__CALLBACK_LOOP = []", "try: aes_key_pack = self.__CLIENT_KEYS[username] except KeyError: continue aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack))", "len(msg_lst) > 0: if username in msg_lst[0]: INDEX = msg_lst[0].index(username)", "for e in exceptions: self.INPUTS.remove(e) if e in self.OUTPUTS: self.OUTPUTS.remove(e)", "\"username_secure\": resolved_data = eval(_recv_.msg) aes_key = AESGCM.generate_key(256) nonce = os.urandom(32)", "onlyText: _all_ = lower_case + upper_case else: _all_ = lower_case", "self._get_node(file = self.__file_location,key = hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait = False) if", "will create the a asyncronouse tcp server. 
\"\"\" IPNC.__init__(self) self.__secure", "= None, listeners : int = None): self.address = address", "nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_res_lst[1][INDEX]}\"", "d[\"channel\"] == channel: found = True index = i break", "_recv_.DSP_type == \"DSP_REQ_RES\": try: resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [ resolved_data[\"target_name\"],", "pickle import base64 import os from cryptography.hazmat.backends import default_backend from", "file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"] :", "r in self.WRITABLE: self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r] continue # handling the", "except: pass def _name_generator(self,_len_ = 16, onlyText = False): lower_case", "if channel is not None: found = False index =", "number = list(\"0123456789\") if onlyText: _all_ = lower_case + upper_case", "is to True then channel_name should be a list of", "# __varified_devices = self.__VARIFIED_DEVICES, # __varifier_lst = self.__VARIFIER_LIST, # __client_keys", ") thread3 = threading.Thread( target = self.__callback_loop, args = (", "pickle.loads(base64.b64decode(key_pack)) dsp_data = DSP( DSP_type = channel_name, universalAesKey=key_pack[\"aes_key\"], nonce =", "resolved_data = eval(_recv_.msg) aes_key = AESGCM.generate_key(256) nonce = os.urandom(32) aad", "hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"] : b64_aes_key_pack } ] ) __receiving_msg.pop(INDEX) else:", "bytes = None, aad : str = None, ): if", "nonce is not None: self.NONCE = nonce else: self.NONCE =", "= pickle.loads(ct) return eval(ct) except TypeError: sys.exit() except ValueError: print(\"sender", "node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ] ) else: self._add_node( file", "_all_ = lower_case + upper_case + special + number random.shuffle(_all_)", "if len(req_lst) > 0: if username in 
req_lst[0]: INDEX =", "OBJECT[-1], secure_dict[\"aad\"]) ct = pickle.loads(ct) return eval(ct) else: aesgcm =", "[ target_name, dsp_data ] ) class server(): def __init__(self, file", "not None: get[resolved_data[\"username\"]] = b64_aes_key_pack self._add_node( file = self.__file_location, node", "= i break if found: if args is None: p_data", "= aad else: self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG = None):", "eval(_recv_.msg) aes_key = AESGCM.generate_key(256) nonce = os.urandom(32) aad = bytes(self._name_generator(),\"utf-8\")", "__request_lst = self.REQUEST_LIST # __request_res_lst = self.REQUEST_RESPONSE_LIST # __message_lst =", "self.MESSAGE_LIST, self.REQUEST_LIST, self.REQUEST_RESPONSE_LIST, self.__VARIFIER_LIST, self.__CUSTOM_CHANNEL_MSG_SEND ) ) thread3 = threading.Thread(", "if channel in self.__CUSTOM_CHANNEL: for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"]", "ct = aesgcm.decrypt(self.NONCE, OBJECT[-1], self.AAD) ct = pickle.loads(ct) return eval(ct)", "None): try: read = self._read_yml(file) if read != None: read[node[0]]", "try: resolved_data = eval(_recv_.msg) resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [ resolved_data[\"target_name\"],", "None,args = None): if channel is not None: found =", "= None): if channel is not None: found = False", "socket import queue import threading import sys import pickle import", "not in self.OUTPUTS: self.OUTPUTS.append(r) except Exception as e: print(\"User Disconnected\")", "[] self.MESSAGE_QUEUES = {} self.REQUEST_LIST = [] self.REQUEST_RESPONSE_LIST = []", "OSError: pass if len(req_lst) > 0: if username in req_lst[0]:", "node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), { resolved_data[\"username\"] : b64_aes_key_pack } ]", "self.AAD = b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG = None): if MSG is", "= list(list(zip(*requestList))) req_res_lst = 
list(list(zip(*requestResList))) vari_lst = list(list(zip(*varifierList))) send_c_msg =", "hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait = False ) if get is not None:", "if aad is not None: self.AAD = aad else: self.AAD", "= DSP( DSP_type = \"DSP_handshake_request_res\", universalAesKey = aes_key_pack[\"aes_key\"], nonce =", ") class server(): def __init__(self, file = None, debug :", "username in req_res_lst[0]: INDEX = req_res_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack", "str = None, device_id : int = None, universalAesKey :", "if r in self.OUTPUTS: self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r)", "multiple: if type(channel_name) == type([]): for channel in channel_name: if", "msg is not None: self.msg = msg else: self.msg =", "os from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers.aead import AESGCM from", "else: self.UNIVERSAL_AES_KEY = b'<KEY>' if nonce is not None: self.NONCE", "nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{msg_lst[1][INDEX]}\"", "def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True: # print(f\"Writable : {Writable}\") # time.sleep(2)", "exceptions = select.select(self.INPUTS, self.OUTPUTS, self.INPUTS) # handling the inputs for", "message_q[s].get_nowait() message_q[s].put(username) msg_lst = list(list(zip(*message_list))) req_lst = list(list(zip(*requestList))) req_res_lst =", "address self.port = port self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET,", "get ] ) else: self._add_node( file = self.__file_location, node =", "MSG = base64.b64encode(pickle.dumps(data)) ) self.__CUSTOM_CHANNEL_MSG_SEND.append( [ target_name, dsp_data ] )", "self.__encrypt(pickled_data)] p_e_d = pickle.dumps(encrypted_data) ret = base64.b64encode(p_e_d) return ret def", "algorithm = 
hashes.SHA256(), label = None ) ) ciphertext =", "= \"w\") def _get_node(self,file = None, key = None, wait", "not in self.WRITABLE: self.WRITABLE.append(w) # handling the errors for e", "\"DSP_handshake_request_res\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"]", "if read != None: read[node[0]] self._change_node_value(file,node) else: raise KeyError except", "True, file : str = None -> here user need", "in self.WRITABLE: self.WRITABLE.remove(r) r.close() del self.MESSAGE_QUEUES[r] continue else: qwe =", "server. \"\"\" IPNC.__init__(self) self.__secure = secure self.__file_location = file self.READABLE", "try: OBJECT = base64.b64decode(OBJECT) OBJECT = pickle.loads(OBJECT) if secure ==", "None, DSP_type : str = None, device_id : int =", "ciphertext = base64.b64encode(ciphertext) prepare_data = {\"key\" : ciphertext} dsp_data =", "\"\"\" IPNC.__init__(self) self.__secure = secure self.__file_location = file self.READABLE =", "resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_res_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ] ) __receiving_msg.remove(_data_)", "bool = True -> this should set to the default", "r in self.OUTPUTS: self.OUTPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) self.INPUTS.remove(r)", "def __repr__(self): return \"_main.DSP._\" def __encrypt(self,data): aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,) ct", "import base64 import os from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers.aead", "argument: 'secure_lst'\") else: secure_dict = pickle.loads(base64.b64decode(secure_dict)) aesgcm = AESGCM(secure_dict[\"aes_key\"]) ct", "= list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else: raise TypeError(\"'channel' should not be", "while True: for index,func in enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1]) def 
SEND(self,channel_name,target_name,data):", "= self._get_node( file = self.__file_location, key = hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), wait =", "= True thread1.start() # thread2.daemon = True thread2.start() # thread3.daemon", "except: pass def send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True: # print(f\"Writable : {Writable}\")", "thread2 = threading.Thread( target = self.send_func, args = ( self.WRITABLE,", "multiple : bool = False): if multiple: if type(channel_name) ==", "hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait = False) if get is not None:", "= False ) if get is not None: get[resolved_data[\"username\"]] =", "<reponame>AnanyaRamanA/shiSock from re import S import select import socket import", "in decoding\") self.__RECEIVING_MSG.append(data) self.MESSAGE_QUEUES[r].put(pickle.loads(base64.b64decode(data))[0]) if r not in self.OUTPUTS: self.OUTPUTS.append(r)", "aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) +", "= pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_handshake_request\", universalAesKey =", "self.__callback_loop, args = ( self.__CALLBACK_LOOP, ) ) # thread1.daemon =", ")._messanger( MSG = f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send(", "= [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), get ] ) else: self._add_node( file =", "pickle.dumps(encrypted_data) ret = base64.b64encode(p_e_d) return ret def __repr__(self): return \"_main.DSP._\"", "= aes_key_pack[\"aad\"] )._messanger( MSG = f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\")", "__init__(self, file = None, debug : bool = False, MTCL", "= __parent.SERVER self.CREATE_CHANNEL = 
__parent.CREATE_CHANNEL self.LISTEN = __parent.LISTEN self.SEND =", "asyncronouse tcp server. \"\"\" IPNC.__init__(self) self.__secure = secure self.__file_location =", "MSG = f\"{msg_lst[1][INDEX]}\" ).decode().center(len(msg_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") try: s.send(bytes(f\"{len(dsp_data)}\".center(16,\"|\"),\"utf-8\")) s.send(", ": bool = True,file = None): \"\"\"async_server initializer class that", "import os from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers.aead import AESGCM", "{ node[0] : node[1] } self._write_yml(file, node_dict) def _change_node_value(self,file =", "del self.MESSAGE_QUEUES[r] continue except Exception as e: pass if data_len:", "pickle.dumps(qw) b64_aes_key_pack = base64.b64encode(pickle_qw) key = load_ssh_public_key( bytes( resolved_data[\"data\"], \"utf-8\"", "for w in writable: if w not in self.WRITABLE: self.WRITABLE.append(w)", "in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel : {channel} already exists.\") else:", ") if get is not None: get[resolved_data[\"username\"]] = b64_aes_key_pack self._add_node(", "import padding from cryptography.hazmat.backends import default_backend import hashlib import yaml", "username = message_q[s].get_nowait() message_q[s].put(username) msg_lst = list(list(zip(*message_list))) req_lst = list(list(zip(*requestList)))", "= message_q[s].get_nowait() message_q[s].put(username) msg_lst = list(list(zip(*message_list))) req_lst = list(list(zip(*requestList))) req_res_lst", "data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data = pickle.dumps(data) pickled_data = data encrypted_data", "= MSG data = f'DSP(\"{self.msg}\",\"{self.DSP_type}\")' data = pickle.dumps(data) pickled_data =", "to create multi-client server. 
args: secure : bool = True", "self.__CUSTOM_CHANNEL_MSG_SEND = [] self.__VARIFIER_LIST = [] self.__CALLBACK_LOOP = [] self.__RECEIVING_MSG", "else: _all_ = lower_case + upper_case + special + number", "= threading.Thread( target = self.__callback_loop, args = ( self.__CALLBACK_LOOP, )", "port : int = None, listeners : int = None):", "nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_lst[1][INDEX]}\"", "bool = False, MTCL : bool = True, MPCL :", "dsp_data ) requestResList.pop(INDEX) if len(vari_lst) > 0: if username in", "self.__CUSTOM_CHANNEL_MSG_SEND ) ) thread3 = threading.Thread( target = self.__callback_loop, args", "in self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r) if r in self.WRITABLE: self.WRITABLE.remove(r) r.close()", "= None,args = None): if channel is not None: found", "DSP( DSP_type = channel_name, universalAesKey=key_pack[\"aes_key\"], nonce = key_pack[\"nonce\"], aad= key_pack[\"aad\"]", "\"DSP_handshake_request\", universalAesKey = aes_key_pack[\"aes_key\"], nonce = aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"]", "in req_lst[0]: INDEX = req_lst[0].index(username) try: aes_key_pack = self.__CLIENT_KEYS[username] except", "f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestList.pop(INDEX)", "in __varified_devices: _recv_ = DSP()._convert_to_class(_data_, secure = False) if _recv_.DSP_type", "secure = False) if _recv_.DSP_type == \"username_secure\": resolved_data = eval(_recv_.msg)", "= True thread.start() def __server(self): data_recv_len = [] while True:", "if type(channel_name) == type([]): for channel in channel_name: if channel", "import select import socket import queue import threading import sys", "with open(file) as file: documents = yaml.full_load(file) return documents def", "= self.REQUEST_RESPONSE_LIST # __message_lst = self.MESSAGE_LIS # 
__custom_c_m_r = self.__CUSTOM_CHANNEL_MSG_REC", "= pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError: print(\"Client Disconnected\") if r in self.OUTPUTS:", "self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data = DSP( DSP_type = \"DSP_MSG\",", "in self.__CUSTOM_CHANNEL: for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"] == channel:", "MSG = f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data", ": bool = True, MPCL : bool = False, safeMode", "_messanger(self,MSG = None): if MSG is not None: self.msg =", "channel: found = True index = i break if found:", "secure_dict = pickle.loads(base64.b64decode(secure_dict)) aesgcm = AESGCM(secure_dict[\"aes_key\"]) ct = aesgcm.decrypt( secure_dict[\"nonce\"],", "'secure_lst'\") else: secure_dict = pickle.loads(base64.b64decode(secure_dict)) aesgcm = AESGCM(secure_dict[\"aes_key\"]) ct =", "self.INPUTS.remove(e) if e in self.OUTPUTS: self.OUTPUTS.remove(e) e.close() del self.MESSAGE_QUEUES[e] def", "= self.__CLIENT_KEYS[username] except KeyError: continue aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data =", "has not done the handshake\") class MAIN(IPNC): def __init__(self,secure :", "documents = yaml.full_load(file) return documents def _write_yml(self,file = None, dict_data", "KeyError except KeyError: node_dict = { node[0] : node[1] }", "= self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args = list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else: raise TypeError(\"'channel'", "not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel) else: print(f\"Channel : {channel} already exists.\")", "= pickle.dumps(qw) b64_aes_key_pack = base64.b64encode(pickle_qw) key = load_ssh_public_key( bytes( resolved_data[\"data\"],", "target 
= self.__callback_loop, args = ( self.__CALLBACK_LOOP, ) ) #", "return ret def __repr__(self): return \"_main.DSP._\" def __encrypt(self,data): aesgcm =", "then channel_name should be a list of multiple channel names\")", "inputs for r in readable: if r is self.sock: connection,addr", "= self.receive_func, args = ( self.__RECEIVING_MSG, self.__VARIFIED_DEVICES, self.__VARIFIER_LIST, self.__CLIENT_KEYS, self.OUTPUTS,", "= {} self.REQUEST_LIST = [] self.REQUEST_RESPONSE_LIST = [] self.MESSAGE_LIST =", "= self._read_yml(file = file) r_yml[node] r_yml.pop(node) self._write_yml(file = file, dict_data", "self.__RECEIVING_MSG, # __varified_devices = self.__VARIFIED_DEVICES, # __varifier_lst = self.__VARIFIER_LIST, #", "= aes_key_pack[\"nonce\"], aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_res_lst[1][INDEX]}\" ).decode().center(len(req_res_lst[1][INDEX])", "username in send_c_msg[0]: INDEX = send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) #", "def _read_yml(self,file = None): with open(file) as file: documents =", "= eval(_recv_.msg) resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg))) __request_lst.append( [ resolved_data[\"target_name\"], _recv_.msg ]", "= send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) # except: # pass def", "if username in req_lst[0]: INDEX = req_lst[0].index(username) try: aes_key_pack =", "self.REQUEST_RESPONSE_LIST = [] self.MESSAGE_LIST = [] self.__VARIFIED_DEVICES = [] self.__CLIENT_KEYS", "safeMode : bool = True): \"\"\" This class allows user", "True: for index,func in enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1]) def SEND(self,channel_name,target_name,data): if", "node = None): try: read = self._read_yml(file) if read !=", "pass except: pass def 
send_func(self,Writable,message_q,message_list,requestList,requestResList,varifierList,customChannelMessageSend): while True: # print(f\"Writable :", "list(list(zip(*requestResList))) vari_lst = list(list(zip(*varifierList))) send_c_msg = list(zip(*customChannelMessageSend)) except KeyError: pass", "aad = aes_key_pack[\"aad\"] )._messanger( MSG = f\"{req_lst[1][INDEX]}\" ).decode().center(len(req_lst[1][INDEX]) + 100,", "select.select(self.INPUTS, self.OUTPUTS, self.INPUTS) # handling the inputs for r in", "try: data_len = pickle.loads(base64.b64decode(r.recv(32).decode().strip(\"0\").encode(\"utf-8\"))) except ConnectionResetError: print(\"Client Disconnected\") if r", "s.send( dsp_data ) message_list.pop(INDEX) except OSError: pass if len(req_lst) >", "self.__CALLBACK_LOOP.append([function,[p_data]]) else: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index) args = list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args])", "vari_lst[0]: INDEX = vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX] ) varifierList.pop(INDEX) if", "self, msg : str = None, DSP_type : str =", "if channel_name not in self.__CUSTOM_CHANNEL: self.__CUSTOM_CHANNEL.append(channel_name) def LISTEN(self,channel : str", "= self.REQUEST_LIST # __request_res_lst = self.REQUEST_RESPONSE_LIST # __message_lst = self.MESSAGE_LIS", "file : str = None -> here user need to", "b\"au$tica&tedbut@u32nencr#cdscypteddatafdrj\" def _messanger(self,MSG = None): if MSG is not None:", "False, MTCL : bool = True, MPCL : bool =", "= None,function : object = None,args = None): if channel", "args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else: raise TypeError(\"'channel' should not be None\") def", "node[1] } self._write_yml(file, node_dict) def _change_node_value(self,file = None, node =", "import threading import sys import pickle import base64 import os", "universalAesKey is not None: 
self.UNIVERSAL_AES_KEY = universalAesKey else: self.UNIVERSAL_AES_KEY =", "__receiving_msg,__varified_devices, __varifier_lst, __client_keys, __outputs, __request_lst, __request_res_lst, __message_lst, __custom_c_m_r): # __receiving_msg", "a yaml file which saves all the keys and configurations.", ": bool = False): if multiple: if type(channel_name) == type([]):", "if len(vari_lst) > 0: if username in vari_lst[0]: INDEX =", "'mutliple' is to True then channel_name should be a list", "channel in self.__CUSTOM_CHANNEL: for i,d in enumerate(self.__CUSTOM_CHANNEL_MSG_REC): if d[\"channel\"] ==", "in enumerate(__callback_loop): __callback_loop.pop(index) func[0](*func[1]) def SEND(self,channel_name,target_name,data): if channel_name in self.__CUSTOM_CHANNEL:", "= pickle.loads(ct) return eval(ct) else: aesgcm = AESGCM(self.UNIVERSAL_AES_KEY) ct =", "{data[0]}\") # print(f\"__varified_devices : {__varified_devices}\") if data[0] not in __varified_devices:", "to pass a yaml file which saves all the keys", "TypeError \"\"\" if not file: raise TypeError(\"asyncServer() missing 1 required", ").decode().center(len(req_res_lst[1][INDEX]) + 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestResList.pop(INDEX) if", "\"utf-8\" ), backend=default_backend() ) ciphertext = key.encrypt( b64_aes_key_pack, padding.OAEP( mgf", "= __client_keys[data[0]] _recv_ = DSP()._convert_to_class( OBJECT = _data_, secure =", "] ) class server(): def __init__(self, file = None, debug", "in self.WRITABLE: self.WRITABLE.remove(r) del self.MESSAGE_QUEUES[r] continue # handling the outputs", "else: self._add_node( file = self.__file_location, node = [ hashlib.sha256(bytes(\"key\",\"utf-8\")).digest(), {", "break if found: if args is None: p_data = self.__CUSTOM_CHANNEL_MSG_REC.pop(index)", "False): if multiple: if type(channel_name) == type([]): for channel in", ": int = None): self.address = address self.port = port", 
"pickle.loads(ct) return eval(ct) except TypeError: sys.exit() except ValueError: print(\"sender has", "upper_case + special + number random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_)) class DSP():", "= [] self.__CALLBACK_LOOP = [] self.__RECEIVING_MSG = [] get =", "+ 100, \"|\").encode(\"utf-8\") s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestResList.pop(INDEX) if len(vari_lst)", "MSG = prepare_data ) dsp_data = [resolved_data[\"username\"],dsp_data] __varifier_lst.append(dsp_data) __varified_devices.append(resolved_data[\"username\"]) __client_keys[resolved_data[\"username\"]]", "readable, writable, exceptions = select.select(self.INPUTS, self.OUTPUTS, self.INPUTS) # handling the", "\"a+\"): with open(file, mode) as file: yaml.dump(dict_data, file) def _add_node(self,file", "is not None: get[resolved_data[\"username\"]] = b64_aes_key_pack self._add_node( file = self.__file_location,", "{ \"aes_key\" : aes_key, \"nonce\" : nonce, \"aad\" : aad,", "self.__CUSTOM_CHANNEL_MSG_REC, ) ) thread2 = threading.Thread( target = self.send_func, args", "universalAesKey else: self.UNIVERSAL_AES_KEY = b'<KEY>' if nonce is not None:", ": aad, } pickle_qw = pickle.dumps(qw) b64_aes_key_pack = base64.b64encode(pickle_qw) key", "list(args) args.insert(0,p_data) self.__CALLBACK_LOOP.append([function,args]) else: raise TypeError(\"'channel' should not be None\")", "_recv_ = DSP()._convert_to_class(_data_, secure = False) if _recv_.DSP_type == \"username_secure\":", "_recv_.DSP_type == \"DSP_REQ\": try: resolved_data = eval(_recv_.msg) resolved_data = pickle.loads(base64.b64decode(eval(_recv_.msg)))", "None\") def __callback_loop(self,__callback_loop): while True: for index,func in enumerate(__callback_loop): __callback_loop.pop(index)", "msg_lst[0]: INDEX = msg_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack))", "get = self._get_node(file = 
self.__file_location,key = hashlib.sha256(bytes(\"key\", \"utf-8\")).digest(), wait =", "send_c_msg[0].index(username) s.send(bytes(f\"{len(send_c_msg[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send(send_c_msg[1][INDEX]) customChannelMessageSend.pop(INDEX) # except: # pass def CREATE_CHANNEL(self,channel_name", "= self._read_yml(file) try: value = r_yml[key] return value except KeyError:", "exists.\") else: raise TypeError(\"When 'mutliple' is to True then channel_name", "int = None, universalAesKey : bytes = None, nonce :", "if msg is not None: self.msg = msg else: self.msg", "as file: yaml.dump(dict_data, file) def _add_node(self,file = None, node =", "None, device_id : int = None, universalAesKey : bytes =", "s.send(bytes(f\"{len(dsp_data)+100}\".center(16,\"|\"),\"utf-8\")) s.send( dsp_data ) requestResList.pop(INDEX) if len(vari_lst) > 0: if", "data, self.AAD ) return ct def _convert_to_class(self,OBJECT : bytes =", "secure_dict is None: raise TypeError( \"convert_to_class() missing 1 required positional", "| LISTENING]\") self.INPUTS.append(self.sock) thread1 = threading.Thread( target = self.receive_func, args", "encrypted_data = [self.device_id, self.__encrypt(pickled_data)] p_e_d = pickle.dumps(encrypted_data) ret = base64.b64encode(p_e_d)", "list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') special = list(\"!@#$%&*?\") number = list(\"0123456789\") if onlyText: _all_", "if _recv_.DSP_type == \"username_secure\": resolved_data = eval(_recv_.msg) aes_key = AESGCM.generate_key(256)", "-> here user need to pass a yaml file which", ") # thread1.daemon = True thread1.start() # thread2.daemon = True", ") else: print(\"User Disconnected\") if r in self.OUTPUTS: self.OUTPUTS.remove(r) self.INPUTS.remove(r)", "aesgcm = AESGCM(secure_dict[\"aes_key\"]) ct = aesgcm.decrypt( secure_dict[\"nonce\"], OBJECT[-1], secure_dict[\"aad\"]) ct", "> 0: if username in req_res_lst[0]: INDEX = req_res_lst[0].index(username) aes_key_pack", "> 0: if username in req_lst[0]: INDEX = 
req_lst[0].index(username) try:", "INDEX = req_lst[0].index(username) try: aes_key_pack = self.__CLIENT_KEYS[username] except KeyError: continue", "aesgcm = AESGCM(self.UNIVERSAL_AES_KEY,) ct = aesgcm.encrypt( self.NONCE, data, self.AAD )", "number random.shuffle(_all_) return \"\".join(random.sample(_all_,_len_)) class DSP(): def __init__( self, msg", "username in vari_lst[0]: INDEX = vari_lst[0].index(username) s.send(bytes(f\"{len(vari_lst[1][INDEX])}\".center(16,\"|\"),\"utf-8\")) s.send( vari_lst[1][INDEX] )", "= self._read_yml(file) r_yml[node[0]] = node[1] self._write_yml(file = file, dict_data =", "= msg_lst[0].index(username) aes_key_pack = self.__CLIENT_KEYS[username] aes_key_pack = pickle.loads(base64.b64decode(aes_key_pack)) dsp_data =", "_recv_ = DSP()._convert_to_class( OBJECT = _data_, secure = True, secure_dict", "the a asyncronouse tcp server. \"\"\" IPNC.__init__(self) self.__secure = secure", "threading.Thread(target = self.__server) # thread.daemon = True thread.start() def __server(self):", "_get_node(self,file = None, key = None, wait = True): if", "socket.SOCK_STREAM) self.sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) self.sock.setblocking(0) self.sock.bind((self.address,self.port)) self.sock.listen(listeners) print(\"[SERVER", "__varified_devices = self.__VARIFIED_DEVICES, # __varifier_lst = self.__VARIFIER_LIST, # __client_keys =" ]
[ "5): decode_sequence, # Search result done (1, 7): decode_sequence, #", "False elif pyasn1_version <= '0.3.7': def encodeValue(self, value, encodeFun, **options):", "def decode_sequence(message, start, stop, context_decoders=None): decoded = [] while start", "is free software: you can redistribute it and/or modify #", "if data[1] <= 127: # BER definite length - short", "3): decode_octet_string # } BIND_RESPONSE_CONTEXT = { 7: decode_octet_string #", "bits counts the number of following octets containing the value", "127 bytes long return data[1], 2 else: # BER definite", "0b00011111 ber_decoder = DECODERS[(ber_class, octet & 0b00011111)] if ber_class <", "encoding Boolean with the value 0xFF for TRUE # THIS", "# THIS IS NOT PART OF THE FAST BER DECODER", "last 7 bits counts the number of following octets containing", "value, defMode, maxChunkSize): return value and (255,) or (0,), False,", "bytearray(x) DECODERS = { # Universal (0, 1): decode_boolean, #", "deepcopy(typeMap) customTagMap[Boolean.tagSet] = LDAPBooleanEncoder() customTypeMap[Boolean.typeId] = LDAPBooleanEncoder() encode = Encoder(customTagMap,", "= context_decoders[ber_type](message, start, start + ber_len) # call value decode", "= Encoder(customTagMap, customTypeMap) # end of monkey patching # a", "= RESULT_CODES[response_dict['result']] response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN response_dict['message'] =", "referral in response[3][3]]) # referrals else: response_dict['referrals'] = None return", "file is part of ldap3. # # ldap3 is free", "# for monkeypatching of boolean value from ..core.results import RESULT_CODES", "maxChunkSize): return value and (255,) or (0,), False, False elif", "def decode_extended_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT)", "response_dict ###### if str is not bytes: # Python 3", "length - long form. 
Highest bit of byte 1 is", "WITHOUT ANY WARRANTY; without even the implied warranty of #", "data[1], 2 else: # BER definite length - long form.", "return value def decode_octet_string(message, start, stop, context_decoders=None): return message[start: stop]", "be up to 127 bytes long return data[1], 2 else:", "of the GNU Lesser General Public License # along with", "String (0, 10): decode_integer, # Enumerated (0, 16): decode_sequence, #", "encodeFun, **options): return value and (255,) or (0,), False, False", "for length decoded = decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT)", "for TRUE # THIS IS NOT PART OF THE FAST", "any later version. # # ldap3 is distributed in the", "BooleanEncoder, encode from pyasn1.type.univ import Boolean from pyasn1.compat.octets import ints2octs", "(255,) or (0,), False, False elif pyasn1_version <= '0.3.4': def", "if pyasn1_version <= '0.2.3': from pyasn1.compat.octets import ints2octs _true =", "ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT) return { 'messageID': decoded[0][3], 'protocolOp': decoded[1][2],", "(2, 3): decode_octet_string # } BIND_RESPONSE_CONTEXT = { 7: decode_octet_string", "byte * (256 ** cont) return value_length, bytes_length + 2", "Delete response (1, 13): decode_sequence, # ModifyDN response (1, 15):", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "else None ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10])) start", "{ 10: decode_octet_string, # ResponseName 11: decode_octet_string # Response Value", "else None } def decode_sequence(message, start, stop, context_decoders=None): decoded =", "{ # Universal (0, 1): decode_boolean, # Boolean (0, 2):", "def get_byte(x): return ord(x) def get_bytes(x): return bytearray(x) DECODERS =", "in other modules from pyasn1.codec.ber.encoder import Encoder # for monkeypatching", "or # (at your option) any later version. 
# #", "# it under the terms of the GNU Lesser General", "pyasn1_version <= '0.3.1': def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return", "== 0 else True def decode_bind_response(message, start, stop, context_decoders=None): return", "Python 3 def get_byte(x): return x def get_bytes(x): return x", "decode_octet_string # } BIND_RESPONSE_CONTEXT = { 7: decode_octet_string # SaslCredentials", "get_byte(x): return ord(x) def get_bytes(x): return bytearray(x) DECODERS = {", "from ..core.results import RESULT_CODES from ..utils.conv import to_unicode from ..protocol.convert", "elif pyasn1_version <= '0.3.1': def encodeValue(self, encodeFun, value, defMode, maxChunkSize):", "stop, context_decoders=None): return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT) def decode_intermediate_response(message, start,", "return { 'messageID': decoded[0][3], 'protocolOp': decoded[1][2], 'payload': decoded[1][3], 'controls': decoded[2][3]", "False if pyasn1_version <= '0.2.3': from pyasn1.compat.octets import ints2octs _true", "= LDAPBooleanEncoder() customTypeMap[Boolean.typeId] = LDAPBooleanEncoder() encode = Encoder(customTagMap, customTypeMap) #", "(0,), False, False customTagMap = deepcopy(tagMap) customTypeMap = deepcopy(typeMap) customTagMap[Boolean.tagSet]", "2 + bytes_length]: cont -= 1 value_length += byte *", "monkeypatching of boolean value from ..core.results import RESULT_CODES from ..utils.conv", "function else: # try: value = context_decoders[ber_type](message, start, start +", "referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]]) # referrals else: response_dict['referrals']", "long return data[1], 2 else: # BER definite length -", "This file is part of ldap3. 
# # ldap3 is", "= bool(octet & 0b00100000) ber_type = octet & 0b00011111 ber_decoder", "# by the Free Software Foundation, either version 3 of", "def decode_integer(message, start, stop, context_decoders=None): first = message[start] value =", "pyasn1_version <= '0.3.4': def encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False):", "**options): return value and (255,) or (0,), False, False customTagMap", "ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start of sequence, at maximum", "# for usage in other modules from pyasn1.codec.ber.encoder import Encoder", "# referrals else: response_dict['referrals'] = None return response_dict ###### if", "ber_class < 2 else None ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start", "'0.2.3': from pyasn1.compat.octets import ints2octs _true = ints2octs((255,)) _false =", "Foundation, either version 3 of the License, or # (at", "& 0b10000000), bool(octet & 0b01000000))] ber_constructed = bool(octet & 0b00100000)", "start + ber_len, context_decoders) # call value decode function else:", "# Universal (False, True): 1, # Application (True, False): 2,", "in the COPYING and COPYING.LESSER files. 
# If not, see", "= compute_ber_size(get_bytes(message[start: start + 10])) start += ber_value_offset if ber_decoder:", "typeMap, AbstractItemEncoder from pyasn1.type.univ import Boolean from copy import deepcopy", "decode_sequence(message, start, stop, context_decoders=None): decoded = [] while start <", "decoder # for usage in other modules from pyasn1.codec.ber.encoder import", "Universal (False, True): 1, # Application (True, False): 2, #", "0 for octet in message[start: stop]: value = value <<", "= DECODERS[(ber_class, octet & 0b00011111)] if ber_class < 2 else", "bytes_length for byte in data[2: 2 + bytes_length]: cont -=", "return False if message[start: stop] == 0 else True def", "False, False else: def encodeValue(self, value, asn1Spec, encodeFun, **options): return", "of value and value offset \"\"\" if data[1] <= 127:", "data[1] - 128 value_length = 0 cont = bytes_length for", "response[3][3]]) # referrals else: response_dict['referrals'] = None return response_dict ######", "import to_unicode from ..protocol.convert import referrals_to_list CLASSES = {(False, False):", "= bytes_length for byte in data[2: 2 + bytes_length]: cont", "NOT PART OF THE FAST BER DECODER if pyasn1_version ==", "# Copyright 2015 - 2018 <NAME> # # This file", "bits - Value can be up to 127 bytes long", "value and value offset \"\"\" if data[1] <= 127: #", "ints2octs((255,)) tagMap[Boolean.tagSet] = BooleanCEREncoder() else: from pyasn1.codec.ber.encoder import tagMap, typeMap,", "# if ber_type == 3: # Referral in result #", "response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage if len(response) == 4:", "usage in other modules from pyasn1.codec.ber.encoder import Encoder # for", "General Public License for more details. 
# # You should", "decode_octet_string, # IntermediateResponseName 1: decode_octet_string # IntermediateResponseValue } LDAP_MESSAGE_CONTEXT =", "Controls 3: decode_sequence # Referral } CONTROLS_CONTEXT = { 0:", "# call value decode function for context class # except", "+ 2 def decode_message_fast(message): ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get", "# If not, see <http://www.gnu.org/licenses/>. from pyasn1 import __version__ as", "Response Value } INTERMEDIATE_RESPONSE_CONTEXT = { 0: decode_octet_string, # IntermediateResponseName", "CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response): response_dict = dict() response_dict['result'] = int(response[0][3]) #", "# else: # raise # re-raise, should never happen decoded.append((ber_class,", "the Free Software Foundation, either version 3 of the License,", "Monkeypatching of pyasn1 for encoding Boolean with the value 0xFF", "can be up to 127 bytes long return data[1], 2", "19): decode_sequence, # Search result reference (1, 24): decode_extended_response, #", "= to_unicode(response[1][3], from_server=True) # matchedDN response_dict['message'] = to_unicode(response[2][3], from_server=True) #", "(1, 7): decode_sequence, # Modify response (1, 9): decode_sequence, #", "3 bytes for length decoded = decode_sequence(message, ber_value_offset, ber_len +", "else: from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder from pyasn1.type.univ import", "(0, 2): decode_integer, # Integer (0, 4): decode_octet_string, # Octet", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "sequence, at maximum 3 bytes for length decoded = decode_sequence(message,", "False): 0, # Universal (False, True): 1, # Application (True,", "(0,), False, False elif pyasn1_version <= '0.3.4': def encodeValue(self, encodeFun,", "free software: you can redistribute it and/or modify # it", "GNU Lesser General Public License as published # by the", "..core.results import RESULT_CODES from 
..utils.conv import to_unicode from ..protocol.convert import", "else: def encodeValue(self, value, asn1Spec, encodeFun, **options): return value and", "IS NOT PART OF THE FAST BER DECODER if pyasn1_version", "or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "result reference (1, 24): decode_extended_response, # Extended response (1, 25):", "decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT) return { 'messageID': decoded[0][3],", "tagMap, typeMap, AbstractItemEncoder from pyasn1.type.univ import Boolean from copy import", "License # along with ldap3 in the COPYING and COPYING.LESSER", "= int(response[0][3]) # resultCode response_dict['description'] = RESULT_CODES[response_dict['result']] response_dict['dn'] = to_unicode(response[1][3],", "AbstractItemEncoder from pyasn1.type.univ import Boolean from copy import deepcopy class", "# This file is part of ldap3. # # ldap3", "2 def get_byte(x): return ord(x) def get_bytes(x): return bytearray(x) DECODERS", "_true = ints2octs((255,)) _false = ints2octs((0,)) def encodeValue(self, encodeFun, value,", "return value and (255,) or (0,), False, False else: def", "ints2octs _true = ints2octs((255,)) _false = ints2octs((0,)) def encodeValue(self, encodeFun,", "# ldap3 is distributed in the hope that it will", "of byte 1 is 0, message length is in the", "True): 1, # Application (True, False): 2, # Context (True,", "stop]: value = value << 8 | get_byte(octet) return value", "# Python 3 def get_byte(x): return x def get_bytes(x): return", "from ..utils.conv import to_unicode from ..protocol.convert import referrals_to_list CLASSES =", "0: decode_octet_string, # IntermediateResponseName 1: decode_octet_string # IntermediateResponseValue } LDAP_MESSAGE_CONTEXT", "THIS IS NOT PART OF THE FAST BER DECODER if", "can redistribute it and/or modify # it under the terms", "16): decode_sequence, # Sequence (0, 17): decode_sequence, # 
Set #", "is in the last 7 bits - Value can be", "bool(octet & 0b00100000) ber_type = octet & 0b00011111 ber_decoder =", "# Controls 3: decode_sequence # Referral } CONTROLS_CONTEXT = {", "} def decode_sequence(message, start, stop, context_decoders=None): decoded = [] while", "= CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))] ber_constructed = bool(octet", "Bind response (1, 4): decode_sequence, # Search result entry (1,", "decode_extended_response, # Extended response (1, 25): decode_intermediate_response, # intermediate response", "to_unicode(response[1][3], from_server=True) # matchedDN response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage", "response (1, 11): decode_sequence, # Delete response (1, 13): decode_sequence,", "ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start of sequence, at", "stop, context_decoders=None): return decode_sequence(message, start, stop, CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response): response_dict", "start, stop, context_decoders=None): return decode_sequence(message, start, stop, CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response):", "__version__ as pyasn1_version from pyasn1.codec.ber import decoder # for usage", "(256 ** cont) return value_length, bytes_length + 2 def decode_message_fast(message):", "of monkey patching # a fast BER decoder for LDAP", "decode_sequence(message, start, stop, CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response): response_dict = dict() response_dict['result']", "have received a copy of the GNU Lesser General Public", "2 else None ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10]))", "never happen decoded.append((ber_class, ber_constructed, ber_type, value)) start += ber_len return", "Extended response (1, 25): decode_intermediate_response, # intermediate response (2, 3):", "(True, False): 2, # Context (True, True): 3} # Private", "only def compute_ber_size(data): \"\"\" 
Compute size according to BER definite", "decoded = [] while start < stop: octet = get_byte(message[start])", "dict() response_dict['result'] = int(response[0][3]) # resultCode response_dict['description'] = RESULT_CODES[response_dict['result']] response_dict['dn']", "RESULT_CODES[response_dict['result']] response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN response_dict['message'] = to_unicode(response[2][3],", "BER definite length rules Returns size of value and value", "False): 2, # Context (True, True): 3} # Private #", "ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10])) start += ber_value_offset", "= ints2octs((0,)) def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value", "value = ber_decoder(message, start, start + ber_len, context_decoders) # call", "def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value and (255,)", "value and (255,) or (0,), False, False customTagMap = deepcopy(tagMap)", "class # except KeyError: # if ber_type == 3: #", "value and (255,) or (0,), False, False else: def encodeValue(self,", "1): decode_bind_response, # Bind response (1, 4): decode_sequence, # Search", "intermediate response (2, 3): decode_octet_string # } BIND_RESPONSE_CONTEXT = {", "License for more details. 
# # You should have received", "def decode_octet_string(message, start, stop, context_decoders=None): return message[start: stop] def decode_boolean(message,", "from pyasn1.compat.octets import ints2octs _true = ints2octs((255,)) _false = ints2octs((0,))", "for monkeypatching of boolean value from ..core.results import RESULT_CODES from", "(255,) or (0,), False, False else: def encodeValue(self, value, asn1Spec,", "import Boolean from pyasn1.compat.octets import ints2octs class BooleanCEREncoder(BooleanEncoder): _true =", "fast BER decoder for LDAP responses only def compute_ber_size(data): \"\"\"", "Search result entry (1, 5): decode_sequence, # Search result done", "ints2octs class BooleanCEREncoder(BooleanEncoder): _true = ints2octs((255,)) tagMap[Boolean.tagSet] = BooleanCEREncoder() else:", "done (1, 7): decode_sequence, # Modify response (1, 9): decode_sequence,", "GNU Lesser General Public License # along with ldap3 in", "7: decode_octet_string # SaslCredentials } EXTENDED_RESPONSE_CONTEXT = { 10: decode_octet_string,", "_true = ints2octs((255,)) tagMap[Boolean.tagSet] = BooleanCEREncoder() else: from pyasn1.codec.ber.encoder import", "start, start + ber_len) # call value decode function for", "decode_octet_string # IntermediateResponseValue } LDAP_MESSAGE_CONTEXT = { 0: decode_controls, #", "# GNU Lesser General Public License for more details. 
#", "the License, or # (at your option) any later version.", "elif pyasn1_version <= '0.3.4': def encodeValue(self, encodeFun, value, defMode, maxChunkSize,", "else: # Python 2 def get_byte(x): return ord(x) def get_bytes(x):", "maximum 3 bytes for length decoded = decode_sequence(message, ber_value_offset, ber_len", "(1, 5): decode_sequence, # Search result done (1, 7): decode_sequence,", "stop, context_decoders=None): decoded = [] while start < stop: octet", "False else: def encodeValue(self, value, asn1Spec, encodeFun, **options): return value", "decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT) def decode_intermediate_response(message, start, stop, context_decoders=None): return", "response (1, 15): decode_sequence, # Compare response (1, 19): decode_sequence,", "Context (True, True): 3} # Private # Monkeypatching of pyasn1", "A PARTICULAR PURPOSE. See the # GNU Lesser General Public", "from pyasn1.compat.octets import ints2octs class BooleanCEREncoder(BooleanEncoder): _true = ints2octs((255,)) tagMap[Boolean.tagSet]", "import decoder # for usage in other modules from pyasn1.codec.ber.encoder", "monkey patching # a fast BER decoder for LDAP responses", "EXTENDED_RESPONSE_CONTEXT) def decode_intermediate_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop,", "response_dict['referrals'] = None return response_dict ###### if str is not", "<NAME> # # This file is part of ldap3. 
#", "<< 8 | get_byte(octet) return value def decode_octet_string(message, start, stop,", "ldap_result_to_dict_fast(response): response_dict = dict() response_dict['result'] = int(response[0][3]) # resultCode response_dict['description']", "Author: <NAME> # # Copyright 2015 - 2018 <NAME> #", "decode_octet_string # SaslCredentials } EXTENDED_RESPONSE_CONTEXT = { 10: decode_octet_string, #", "get_byte(first) & 0x80 else 0 for octet in message[start: stop]:", "start, stop, EXTENDED_RESPONSE_CONTEXT) def decode_intermediate_response(message, start, stop, context_decoders=None): return decode_sequence(message,", "not, see <http://www.gnu.org/licenses/>. from pyasn1 import __version__ as pyasn1_version from", "2015 - 2018 <NAME> # # This file is part", "in result # value = decode_sequence(message, start, start + ber_len)", "compute_ber_size(get_bytes(message[start: start + 10])) start += ber_value_offset if ber_decoder: value", "2): decode_integer, # Integer (0, 4): decode_octet_string, # Octet String", "'0.3.1': def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value and", "# Response Value } INTERMEDIATE_RESPONSE_CONTEXT = { 0: decode_octet_string, #", "FOR A PARTICULAR PURPOSE. See the # GNU Lesser General", "0xFF for TRUE # THIS IS NOT PART OF THE", "elif pyasn1_version <= '0.3.7': def encodeValue(self, value, encodeFun, **options): return", "# Context (True, True): 3} # Private # Monkeypatching of", "details. 
# # You should have received a copy of", "**options): return value and (255,) or (0,), False, False else:", "# Delete response (1, 13): decode_sequence, # ModifyDN response (1,", "decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT) def decode_extended_response(message, start, stop, context_decoders=None): return", "octet & 0b00011111 ber_decoder = DECODERS[(ber_class, octet & 0b00011111)] if", "response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]]) # referrals", "= get_byte(message[start]) ber_class = CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))]", "LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode = False if pyasn1_version <= '0.2.3': from pyasn1.compat.octets", "stop, context_decoders=None): return message[start: stop] def decode_boolean(message, start, stop, context_decoders=None):", "self._true or self._false, 0 elif pyasn1_version <= '0.3.1': def encodeValue(self,", "customTypeMap) # end of monkey patching # a fast BER", "def decode_bind_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT)", "context_decoders=None): return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message, start, stop,", "terms of the GNU Lesser General Public License as published", "start, stop, INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message, start, stop, context_decoders=None): return decode_sequence(message,", "customTypeMap[Boolean.typeId] = LDAPBooleanEncoder() encode = Encoder(customTagMap, customTypeMap) # end of", "Search result done (1, 7): decode_sequence, # Modify response (1,", "LDAPBooleanEncoder() customTypeMap[Boolean.typeId] = LDAPBooleanEncoder() encode = Encoder(customTagMap, customTypeMap) # end", "int(response[0][3]) # resultCode response_dict['description'] = RESULT_CODES[response_dict['result']] response_dict['dn'] = to_unicode(response[1][3], 
from_server=True)", "BER definite length - short form. Highest bit of byte", "10])) start += ber_value_offset if ber_decoder: value = ber_decoder(message, start,", "128 value_length = 0 cont = bytes_length for byte in", "# Application (True, False): 2, # Context (True, True): 3}", "len(response) == 4: response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral in", "= {(False, False): 0, # Universal (False, True): 1, #", "# Extended response (1, 25): decode_intermediate_response, # intermediate response (2,", "of pyasn1 for encoding Boolean with the value 0xFF for", "pyasn1.codec.ber.encoder import Encoder # for monkeypatching of boolean value from", "ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT) return { 'messageID': decoded[0][3], 'protocolOp':", "function for context class # except KeyError: # if ber_type", "stop, EXTENDED_RESPONSE_CONTEXT) def decode_intermediate_response(message, start, stop, context_decoders=None): return decode_sequence(message, start,", "pyasn1.codec.ber import decoder # for usage in other modules from", "in data[2: 2 + bytes_length]: cont -= 1 value_length +=", "start += ber_value_offset if ber_decoder: value = ber_decoder(message, start, start", "context_decoders=None): return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT) def decode_extended_response(message, start, stop,", "(0, 4): decode_octet_string, # Octet String (0, 10): decode_integer, #", "ints2octs((0,)) def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value and", "defMode, maxChunkSize): return value and (255,) or (0,), False, False", "number of following octets containing the value length bytes_length =", "class BooleanCEREncoder(BooleanEncoder): _true = ints2octs((255,)) tagMap[Boolean.tagSet] = BooleanCEREncoder() else: from", "responses only def compute_ber_size(data): \"\"\" Compute size according to BER", "IntermediateResponseName 1: decode_octet_string # 
IntermediateResponseValue } LDAP_MESSAGE_CONTEXT = { 0:", "the GNU Lesser General Public License # along with ldap3", "& 0x80 else 0 for octet in message[start: stop]: value", "= [] while start < stop: octet = get_byte(message[start]) ber_class", "0, message length is in the last 7 bits -", "{ 'messageID': decoded[0][3], 'protocolOp': decoded[1][2], 'payload': decoded[1][3], 'controls': decoded[2][3] if", "<= '0.3.7': def encodeValue(self, value, encodeFun, **options): return value and", "pyasn1 for encoding Boolean with the value 0xFF for TRUE", "# Bind response (1, 4): decode_sequence, # Search result entry", "4): decode_octet_string, # Octet String (0, 10): decode_integer, # Enumerated", "Universal (0, 1): decode_boolean, # Boolean (0, 2): decode_integer, #", "value and self._true or self._false, 0 elif pyasn1_version <= '0.3.1':", "# Referral in result # value = decode_sequence(message, start, start", "- short form. Highest bit of byte 1 is 0,", "encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value and (255,) or", "COPYING and COPYING.LESSER files. # If not, see <http://www.gnu.org/licenses/>. 
from", "ints2octs((255,)) _false = ints2octs((0,)) def encodeValue(self, encodeFun, value, defMode, maxChunkSize):", "OF THE FAST BER DECODER if pyasn1_version == 'xxx0.2.3': from", "###### if str is not bytes: # Python 3 def", "= BooleanCEREncoder() else: from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder from", "ResponseName 11: decode_octet_string # Response Value } INTERMEDIATE_RESPONSE_CONTEXT = {", "deepcopy(tagMap) customTypeMap = deepcopy(typeMap) customTagMap[Boolean.tagSet] = LDAPBooleanEncoder() customTypeMap[Boolean.typeId] = LDAPBooleanEncoder()", "to BER definite length rules Returns size of value and", "for byte in data[2: 2 + bytes_length]: cont -= 1", "# } BIND_RESPONSE_CONTEXT = { 7: decode_octet_string # SaslCredentials }", "for LDAP responses only def compute_ber_size(data): \"\"\" Compute size according", "} INTERMEDIATE_RESPONSE_CONTEXT = { 0: decode_octet_string, # IntermediateResponseName 1: decode_octet_string", "length bytes_length = data[1] - 128 value_length = 0 cont", "as pyasn1_version from pyasn1.codec.ber import decoder # for usage in", "10: decode_octet_string, # ResponseName 11: decode_octet_string # Response Value }", "encodeFun, value, defMode, maxChunkSize): return value and (255,) or (0,),", "from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder from pyasn1.type.univ import Boolean", "and (255,) or (0,), False, False customTagMap = deepcopy(tagMap) customTypeMap", "return x else: # Python 2 def get_byte(x): return ord(x)", "(0, 10): decode_integer, # Enumerated (0, 16): decode_sequence, # Sequence", "decode_sequence # Referral } CONTROLS_CONTEXT = { 0: decode_sequence #", "that it will be useful, # but WITHOUT ANY WARRANTY;", "copy of the GNU Lesser General Public License # along", "0, # Universal (False, True): 1, # Application (True, False):", "{ 0: decode_octet_string, # IntermediateResponseName 1: decode_octet_string # IntermediateResponseValue }", "value, encodeFun, **options): 
return value and (255,) or (0,), False,", "False, False elif pyasn1_version <= '0.3.7': def encodeValue(self, value, encodeFun,", "import tagMap, typeMap, AbstractItemEncoder from pyasn1.type.univ import Boolean from copy", "either version 3 of the License, or # (at your", "return value and (255,) or (0,), False, False elif pyasn1_version", "defMode, maxChunkSize): return value and self._true or self._false, 0 elif", "def decode_message_fast(message): ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start of", "4): decode_sequence, # Search result entry (1, 5): decode_sequence, #", "context_decoders=None): return decode_sequence(message, start, stop, CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response): response_dict =", "# # ldap3 is distributed in the hope that it", "if str is not bytes: # Python 3 def get_byte(x):", "more details. # # You should have received a copy", "to_unicode(response[2][3], from_server=True) # diagnosticMessage if len(response) == 4: response_dict['referrals'] =", "= LDAPBooleanEncoder() encode = Encoder(customTagMap, customTypeMap) # end of monkey", "bit of byte 1 is 1, last 7 bits counts", "be useful, # but WITHOUT ANY WARRANTY; without even the", "LDAP responses only def compute_ber_size(data): \"\"\" Compute size according to", "# Python 2 def get_byte(x): return ord(x) def get_bytes(x): return", "rules Returns size of value and value offset \"\"\" if", "decoded[0][3], 'protocolOp': decoded[1][2], 'payload': decoded[1][3], 'controls': decoded[2][3] if len(decoded) ==", "Created on 2015.08.19 # # Author: <NAME> # # Copyright", "See the # GNU Lesser General Public License for more", "ber_decoder = DECODERS[(ber_class, octet & 0b00011111)] if ber_class < 2", "True def decode_bind_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop,", "of boolean value from ..core.results import RESULT_CODES from ..utils.conv import", "from pyasn1.type.univ import Boolean from 
pyasn1.compat.octets import ints2octs class BooleanCEREncoder(BooleanEncoder):", "# call value decode function else: # try: value =", "value = context_decoders[ber_type](message, start, start + ber_len) # call value", "# # Copyright 2015 - 2018 <NAME> # # This", "Public License as published # by the Free Software Foundation,", "-1 if get_byte(first) & 0x80 else 0 for octet in", "for context class # except KeyError: # if ber_type ==", "asn1Spec, encodeFun, **options): return value and (255,) or (0,), False,", "data[2: 2 + bytes_length]: cont -= 1 value_length += byte", "decode_sequence, # Add response (1, 11): decode_sequence, # Delete response", "octets containing the value length bytes_length = data[1] - 128", "return bytearray(x) DECODERS = { # Universal (0, 1): decode_boolean,", "# IntermediateResponseValue } LDAP_MESSAGE_CONTEXT = { 0: decode_controls, # Controls", "Encoder(customTagMap, customTypeMap) # end of monkey patching # a fast", "should have received a copy of the GNU Lesser General", "= False if pyasn1_version <= '0.2.3': from pyasn1.compat.octets import ints2octs", "+ ber_value_offset, LDAP_MESSAGE_CONTEXT) return { 'messageID': decoded[0][3], 'protocolOp': decoded[1][2], 'payload':", "stop] == 0 else True def decode_bind_response(message, start, stop, context_decoders=None):", "ber_value_offset, LDAP_MESSAGE_CONTEXT) return { 'messageID': decoded[0][3], 'protocolOp': decoded[1][2], 'payload': decoded[1][3],", "happen decoded.append((ber_class, ber_constructed, ber_type, value)) start += ber_len return decoded", "15): decode_sequence, # Compare response (1, 19): decode_sequence, # Search", "decoded = decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT) return {", "<= '0.3.1': def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value", "decoded.append((ber_class, ber_constructed, ber_type, value)) start += ber_len return decoded def", "response_dict = dict() response_dict['result'] = 
int(response[0][3]) # resultCode response_dict['description'] =", "Boolean from pyasn1.compat.octets import ints2octs class BooleanCEREncoder(BooleanEncoder): _true = ints2octs((255,))", "a fast BER decoder for LDAP responses only def compute_ber_size(data):", "# except KeyError: # if ber_type == 3: # Referral", "(0,), False, False else: def encodeValue(self, value, asn1Spec, encodeFun, **options):", "or (0,), False, False customTagMap = deepcopy(tagMap) customTypeMap = deepcopy(typeMap)", "# BER definite length - long form. Highest bit of", "stop, BIND_RESPONSE_CONTEXT) def decode_extended_response(message, start, stop, context_decoders=None): return decode_sequence(message, start,", "in the last 7 bits - Value can be up", "+ bytes_length]: cont -= 1 value_length += byte * (256", "pyasn1.type.univ import Boolean from pyasn1.compat.octets import ints2octs class BooleanCEREncoder(BooleanEncoder): _true", "decoded[1][2], 'payload': decoded[1][3], 'controls': decoded[2][3] if len(decoded) == 3 else", "Integer (0, 4): decode_octet_string, # Octet String (0, 10): decode_integer,", "return message[start: stop] def decode_boolean(message, start, stop, context_decoders=None): return False", "decoded[1][3], 'controls': decoded[2][3] if len(decoded) == 3 else None }", "message[start: stop] == 0 else True def decode_bind_response(message, start, stop,", "ber_decoder(message, start, start + ber_len, context_decoders) # call value decode", "import __version__ as pyasn1_version from pyasn1.codec.ber import decoder # for", "decode_sequence, # Sequence (0, 17): decode_sequence, # Set # Application", "from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode from pyasn1.type.univ import Boolean", "= { 0: decode_controls, # Controls 3: decode_sequence # Referral", "# but WITHOUT ANY WARRANTY; without even the implied warranty", "Free Software Foundation, either version 3 of the License, or", "def get_byte(x): return x def get_bytes(x): return x else: #", 
"decode_bind_response, # Bind response (1, 4): decode_sequence, # Search result", "or (0,), False, False elif pyasn1_version <= '0.3.4': def encodeValue(self,", "value, defMode, maxChunkSize, ifNotEmpty=False): return value and (255,) or (0,),", "def get_bytes(x): return bytearray(x) DECODERS = { # Universal (0,", "{ 0: decode_controls, # Controls 3: decode_sequence # Referral }", "and/or modify # it under the terms of the GNU", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "= decode_sequence(message, start, start + ber_len) # else: # raise", "ber_len return decoded def decode_integer(message, start, stop, context_decoders=None): first =", "BIND_RESPONSE_CONTEXT = { 7: decode_octet_string # SaslCredentials } EXTENDED_RESPONSE_CONTEXT =", "# get start of sequence, at maximum 3 bytes for", "= deepcopy(typeMap) customTagMap[Boolean.tagSet] = LDAPBooleanEncoder() customTypeMap[Boolean.typeId] = LDAPBooleanEncoder() encode =", "= ints2octs((255,)) _false = ints2octs((0,)) def encodeValue(self, encodeFun, value, defMode,", "# Boolean (0, 2): decode_integer, # Integer (0, 4): decode_octet_string,", "pyasn1_version from pyasn1.codec.ber import decoder # for usage in other", "decode_octet_string # Response Value } INTERMEDIATE_RESPONSE_CONTEXT = { 0: decode_octet_string,", "..utils.conv import to_unicode from ..protocol.convert import referrals_to_list CLASSES = {(False,", "octet & 0b00011111)] if ber_class < 2 else None ber_len,", "pyasn1_version <= '0.2.3': from pyasn1.compat.octets import ints2octs _true = ints2octs((255,))", "ber_constructed, ber_type, value)) start += ber_len return decoded def decode_integer(message,", "# (at your option) any later version. 
# # ldap3", "decode_octet_string, # Octet String (0, 10): decode_integer, # Enumerated (0,", "LDAPBooleanEncoder() encode = Encoder(customTagMap, customTypeMap) # end of monkey patching", "decode_sequence, # ModifyDN response (1, 15): decode_sequence, # Compare response", "it and/or modify # it under the terms of the", "the number of following octets containing the value length bytes_length", "bool(octet & 0b01000000))] ber_constructed = bool(octet & 0b00100000) ber_type =", "= decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT) return { 'messageID':", "decode_octet_string, # ResponseName 11: decode_octet_string # Response Value } INTERMEDIATE_RESPONSE_CONTEXT", "Copyright 2015 - 2018 <NAME> # # This file is", "under the terms of the GNU Lesser General Public License", "value_length = 0 cont = bytes_length for byte in data[2:", "BooleanCEREncoder(BooleanEncoder): _true = ints2octs((255,)) tagMap[Boolean.tagSet] = BooleanCEREncoder() else: from pyasn1.codec.ber.encoder", "it will be useful, # but WITHOUT ANY WARRANTY; without", "or (0,), False, False elif pyasn1_version <= '0.3.7': def encodeValue(self,", "return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message, start, stop, context_decoders=None):", "= { 10: decode_octet_string, # ResponseName 11: decode_octet_string # Response", "13): decode_sequence, # ModifyDN response (1, 15): decode_sequence, # Compare", "False if message[start: stop] == 0 else True def decode_bind_response(message,", "# Universal (0, 1): decode_boolean, # Boolean (0, 2): decode_integer,", "Sequence (0, 17): decode_sequence, # Set # Application (1, 1):", "encode from pyasn1.type.univ import Boolean from pyasn1.compat.octets import ints2octs class", "start, stop, context_decoders=None): first = message[start] value = -1 if", "# end of monkey patching # a fast BER decoder", "get start of sequence, at maximum 3 bytes for length", "context_decoders) # call 
value decode function else: # try: value", "self._false, 0 elif pyasn1_version <= '0.3.1': def encodeValue(self, encodeFun, value,", "BER DECODER if pyasn1_version == 'xxx0.2.3': from pyasn1.codec.ber.encoder import tagMap,", "not bytes: # Python 3 def get_byte(x): return x def", "SaslCredentials } EXTENDED_RESPONSE_CONTEXT = { 10: decode_octet_string, # ResponseName 11:", "decode_octet_string(message, start, stop, context_decoders=None): return message[start: stop] def decode_boolean(message, start,", "FAST BER DECODER if pyasn1_version == 'xxx0.2.3': from pyasn1.codec.ber.encoder import", "value def decode_octet_string(message, start, stop, context_decoders=None): return message[start: stop] def", "message length is in the last 7 bits - Value", "of the License, or # (at your option) any later", "version. # # ldap3 is distributed in the hope that", "hope that it will be useful, # but WITHOUT ANY", "== 3: # Referral in result # value = decode_sequence(message,", "decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message, start, stop, context_decoders=None): return", "size of value and value offset \"\"\" if data[1] <=", "Boolean with the value 0xFF for TRUE # THIS IS", "reference (1, 24): decode_extended_response, # Extended response (1, 25): decode_intermediate_response,", "octet in message[start: stop]: value = value << 8 |", "re-raise, should never happen decoded.append((ber_class, ber_constructed, ber_type, value)) start +=", "= dict() response_dict['result'] = int(response[0][3]) # resultCode response_dict['description'] = RESULT_CODES[response_dict['result']]", "# Author: <NAME> # # Copyright 2015 - 2018 <NAME>", "ifNotEmpty=False): return value and (255,) or (0,), False, False elif", "# Modify response (1, 9): decode_sequence, # Add response (1,", "response (1, 19): decode_sequence, # Search result reference (1, 24):", "ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10])) start += ber_value_offset 
if", "from_server=True) for referral in response[3][3]]) # referrals else: response_dict['referrals'] =", "True): 3} # Private # Monkeypatching of pyasn1 for encoding", "# Search result done (1, 7): decode_sequence, # Modify response", "def encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False): return value and", "encodeFun, value, defMode, maxChunkSize): return value and self._true or self._false,", "INTERMEDIATE_RESPONSE_CONTEXT = { 0: decode_octet_string, # IntermediateResponseName 1: decode_octet_string #", "decode_controls(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, CONTROLS_CONTEXT) def", "response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN response_dict['message'] = to_unicode(response[2][3], from_server=True)", "(1, 11): decode_sequence, # Delete response (1, 13): decode_sequence, #", "& 0b00100000) ber_type = octet & 0b00011111 ber_decoder = DECODERS[(ber_class,", "25): decode_intermediate_response, # intermediate response (2, 3): decode_octet_string # }", "import RESULT_CODES from ..utils.conv import to_unicode from ..protocol.convert import referrals_to_list", "value length bytes_length = data[1] - 128 value_length = 0", "0 elif pyasn1_version <= '0.3.1': def encodeValue(self, encodeFun, value, defMode,", "(255,) or (0,), False, False elif pyasn1_version <= '0.3.7': def", "decode_integer, # Enumerated (0, 16): decode_sequence, # Sequence (0, 17):", "ModifyDN response (1, 15): decode_sequence, # Compare response (1, 19):", "context_decoders=None): return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT) def decode_intermediate_response(message, start, stop,", "Python 2 def get_byte(x): return ord(x) def get_bytes(x): return bytearray(x)", "in message[start: stop]: value = value << 8 | get_byte(octet)", "value_length += byte * (256 ** cont) return value_length, bytes_length", "# Compare response (1, 19): decode_sequence, # Search result 
reference", "will be useful, # but WITHOUT ANY WARRANTY; without even", "PARTICULAR PURPOSE. See the # GNU Lesser General Public License", "bytes_length + 2 def decode_message_fast(message): ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) #", "class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode = False if pyasn1_version <= '0.2.3': from", "+= ber_value_offset if ber_decoder: value = ber_decoder(message, start, start +", "(1, 25): decode_intermediate_response, # intermediate response (2, 3): decode_octet_string #", "< 2 else None ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start +", "start, start + ber_len) # else: # raise # re-raise,", "definite length - short form. Highest bit of byte 1", "with the value 0xFF for TRUE # THIS IS NOT", "the value length bytes_length = data[1] - 128 value_length =", "decode function for context class # except KeyError: # if", "CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))] ber_constructed = bool(octet &", "length - short form. 
Highest bit of byte 1 is", "size according to BER definite length rules Returns size of", "decode_integer, # Integer (0, 4): decode_octet_string, # Octet String (0,", "stop] def decode_boolean(message, start, stop, context_decoders=None): return False if message[start:", "ber_len) # call value decode function for context class #", "return value_length, bytes_length + 2 def decode_message_fast(message): ber_len, ber_value_offset =", "# ResponseName 11: decode_octet_string # Response Value } INTERMEDIATE_RESPONSE_CONTEXT =", "7): decode_sequence, # Modify response (1, 9): decode_sequence, # Add", "Octet String (0, 10): decode_integer, # Enumerated (0, 16): decode_sequence,", "the GNU Lesser General Public License as published # by", "== 3 else None } def decode_sequence(message, start, stop, context_decoders=None):", "value, asn1Spec, encodeFun, **options): return value and (255,) or (0,),", "# # Author: <NAME> # # Copyright 2015 - 2018", "form. Highest bit of byte 1 is 0, message length", "decode function else: # try: value = context_decoders[ber_type](message, start, start", "raise # re-raise, should never happen decoded.append((ber_class, ber_constructed, ber_type, value))", "127: # BER definite length - short form. 
Highest bit", "and (255,) or (0,), False, False else: def encodeValue(self, value,", "ber_constructed = bool(octet & 0b00100000) ber_type = octet & 0b00011111", "# re-raise, should never happen decoded.append((ber_class, ber_constructed, ber_type, value)) start", "False customTagMap = deepcopy(tagMap) customTypeMap = deepcopy(typeMap) customTagMap[Boolean.tagSet] = LDAPBooleanEncoder()", "EXTENDED_RESPONSE_CONTEXT = { 10: decode_octet_string, # ResponseName 11: decode_octet_string #", "response_dict['result'] = int(response[0][3]) # resultCode response_dict['description'] = RESULT_CODES[response_dict['result']] response_dict['dn'] =", "else: # raise # re-raise, should never happen decoded.append((ber_class, ber_constructed,", "# IntermediateResponseName 1: decode_octet_string # IntermediateResponseValue } LDAP_MESSAGE_CONTEXT = {", "ldap3 is distributed in the hope that it will be", "pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder from pyasn1.type.univ import Boolean from", "- 128 value_length = 0 cont = bytes_length for byte", "tagMap, BooleanEncoder, encode from pyasn1.type.univ import Boolean from pyasn1.compat.octets import", "return value and self._true or self._false, 0 elif pyasn1_version <=", "if len(response) == 4: response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral", "maxChunkSize): return value and self._true or self._false, 0 elif pyasn1_version", "(at your option) any later version. # # ldap3 is", "+= ber_len return decoded def decode_integer(message, start, stop, context_decoders=None): first", "bytes_length]: cont -= 1 value_length += byte * (256 **", "maxChunkSize, ifNotEmpty=False): return value and (255,) or (0,), False, False", "for more details. 
# # You should have received a", "pyasn1.compat.octets import ints2octs _true = ints2octs((255,)) _false = ints2octs((0,)) def", "else: response_dict['referrals'] = None return response_dict ###### if str is", "(1, 9): decode_sequence, # Add response (1, 11): decode_sequence, #", "redistribute it and/or modify # it under the terms of", "} LDAP_MESSAGE_CONTEXT = { 0: decode_controls, # Controls 3: decode_sequence", "get_byte(x): return x def get_bytes(x): return x else: # Python", "(1, 24): decode_extended_response, # Extended response (1, 25): decode_intermediate_response, #", "= { 0: decode_octet_string, # IntermediateResponseName 1: decode_octet_string # IntermediateResponseValue", "Compute size according to BER definite length rules Returns size", "else: # try: value = context_decoders[ber_type](message, start, start + ber_len)", "the last 7 bits - Value can be up to", "return decode_sequence(message, start, stop, CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response): response_dict = dict()", "with ldap3 in the COPYING and COPYING.LESSER files. # If", "-= 1 value_length += byte * (256 ** cont) return", "def decode_intermediate_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT)", "= to_unicode(response[2][3], from_server=True) # diagnosticMessage if len(response) == 4: response_dict['referrals']", "Public License # along with ldap3 in the COPYING and", "# raise # re-raise, should never happen decoded.append((ber_class, ber_constructed, ber_type,", "cont = bytes_length for byte in data[2: 2 + bytes_length]:", "decode_boolean(message, start, stop, context_decoders=None): return False if message[start: stop] ==", "# Search result reference (1, 24): decode_extended_response, # Extended response", "context class # except KeyError: # if ber_type == 3:", "# intermediate response (2, 3): decode_octet_string # } BIND_RESPONSE_CONTEXT =", "and COPYING.LESSER files. 
# If not, see <http://www.gnu.org/licenses/>. from pyasn1", "in response[3][3]]) # referrals else: response_dict['referrals'] = None return response_dict", "# Monkeypatching of pyasn1 for encoding Boolean with the value", "LDAP_MESSAGE_CONTEXT) return { 'messageID': decoded[0][3], 'protocolOp': decoded[1][2], 'payload': decoded[1][3], 'controls':", "def decode_controls(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, CONTROLS_CONTEXT)", "(1, 15): decode_sequence, # Compare response (1, 19): decode_sequence, #", "start, stop, context_decoders=None): return False if message[start: stop] == 0", "along with ldap3 in the COPYING and COPYING.LESSER files. #", "ber_type, value)) start += ber_len return decoded def decode_integer(message, start,", "| get_byte(octet) return value def decode_octet_string(message, start, stop, context_decoders=None): return", "4: response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]]) #", "on 2015.08.19 # # Author: <NAME> # # Copyright 2015", "data[1] <= 127: # BER definite length - short form.", "return response_dict ###### if str is not bytes: # Python", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "decoded[2][3] if len(decoded) == 3 else None } def decode_sequence(message,", "If not, see <http://www.gnu.org/licenses/>. from pyasn1 import __version__ as pyasn1_version", "def encodeValue(self, value, encodeFun, **options): return value and (255,) or", "byte in data[2: 2 + bytes_length]: cont -= 1 value_length", "# Add response (1, 11): decode_sequence, # Delete response (1,", "RESULT_CODES from ..utils.conv import to_unicode from ..protocol.convert import referrals_to_list CLASSES", "2 else: # BER definite length - long form. 
Highest", "& 0b00011111)] if ber_class < 2 else None ber_len, ber_value_offset", "context_decoders=None): first = message[start] value = -1 if get_byte(first) &", "from pyasn1 import __version__ as pyasn1_version from pyasn1.codec.ber import decoder", "from copy import deepcopy class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode = False if", "PART OF THE FAST BER DECODER if pyasn1_version == 'xxx0.2.3':", "customTagMap = deepcopy(tagMap) customTypeMap = deepcopy(typeMap) customTagMap[Boolean.tagSet] = LDAPBooleanEncoder() customTypeMap[Boolean.typeId]", "10): decode_integer, # Enumerated (0, 16): decode_sequence, # Sequence (0,", "# Private # Monkeypatching of pyasn1 for encoding Boolean with", "= ints2octs((255,)) tagMap[Boolean.tagSet] = BooleanCEREncoder() else: from pyasn1.codec.ber.encoder import tagMap,", "Returns size of value and value offset \"\"\" if data[1]", "start, stop, CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response): response_dict = dict() response_dict['result'] =", "General Public License as published # by the Free Software", "the hope that it will be useful, # but WITHOUT", "<http://www.gnu.org/licenses/>. from pyasn1 import __version__ as pyasn1_version from pyasn1.codec.ber import", "your option) any later version. # # ldap3 is distributed", "see <http://www.gnu.org/licenses/>. from pyasn1 import __version__ as pyasn1_version from pyasn1.codec.ber", "1 is 1, last 7 bits counts the number of", "0b10000000), bool(octet & 0b01000000))] ber_constructed = bool(octet & 0b00100000) ber_type", "diagnosticMessage if len(response) == 4: response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for", "decode_sequence(message, start, start + ber_len) # else: # raise #", "PURPOSE. 
See the # GNU Lesser General Public License for", "def get_bytes(x): return x else: # Python 2 def get_byte(x):", "= { 7: decode_octet_string # SaslCredentials } EXTENDED_RESPONSE_CONTEXT = {", "decode_sequence, # Set # Application (1, 1): decode_bind_response, # Bind", "software: you can redistribute it and/or modify # it under", "start < stop: octet = get_byte(message[start]) ber_class = CLASSES[(bool(octet &", "or (0,), False, False else: def encodeValue(self, value, asn1Spec, encodeFun,", "# Integer (0, 4): decode_octet_string, # Octet String (0, 10):", "part of ldap3. # # ldap3 is free software: you", "False, False customTagMap = deepcopy(tagMap) customTypeMap = deepcopy(typeMap) customTagMap[Boolean.tagSet] =", "- Value can be up to 127 bytes long return", "0: decode_controls, # Controls 3: decode_sequence # Referral } CONTROLS_CONTEXT", "decode_sequence, # Search result reference (1, 24): decode_extended_response, # Extended", "of the GNU Lesser General Public License as published #", "referrals else: response_dict['referrals'] = None return response_dict ###### if str", "start of sequence, at maximum 3 bytes for length decoded", "'payload': decoded[1][3], 'controls': decoded[2][3] if len(decoded) == 3 else None", "for encoding Boolean with the value 0xFF for TRUE #", "Application (1, 1): decode_bind_response, # Bind response (1, 4): decode_sequence,", "boolean value from ..core.results import RESULT_CODES from ..utils.conv import to_unicode", "message[start: stop]: value = value << 8 | get_byte(octet) return", "response (1, 4): decode_sequence, # Search result entry (1, 5):", "published # by the Free Software Foundation, either version 3", "according to BER definite length rules Returns size of value", "pyasn1.type.univ import Boolean from copy import deepcopy class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode", "value decode function for context class # except KeyError: #", "value = value << 8 | get_byte(octet) return value def", "== 4: 
response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]])", "bytes_length = data[1] - 128 value_length = 0 cont =", "INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop,", "'messageID': decoded[0][3], 'protocolOp': decoded[1][2], 'payload': decoded[1][3], 'controls': decoded[2][3] if len(decoded)", "bytes for length decoded = decode_sequence(message, ber_value_offset, ber_len + ber_value_offset,", "if ber_class < 2 else None ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start:", "if get_byte(first) & 0x80 else 0 for octet in message[start:", "byte 1 is 0, message length is in the last", "length is in the last 7 bits - Value can", "You should have received a copy of the GNU Lesser", "of byte 1 is 1, last 7 bits counts the", "# try: value = context_decoders[ber_type](message, start, start + ber_len) #", "else True def decode_bind_response(message, start, stop, context_decoders=None): return decode_sequence(message, start,", "0 cont = bytes_length for byte in data[2: 2 +", "[] while start < stop: octet = get_byte(message[start]) ber_class =", "# ModifyDN response (1, 15): decode_sequence, # Compare response (1,", "# SaslCredentials } EXTENDED_RESPONSE_CONTEXT = { 10: decode_octet_string, # ResponseName", "\"\"\" # Created on 2015.08.19 # # Author: <NAME> #", "17): decode_sequence, # Set # Application (1, 1): decode_bind_response, #", "def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value and self._true", "long form. 
Highest bit of byte 1 is 1, last", "and self._true or self._false, 0 elif pyasn1_version <= '0.3.1': def", "start + 10])) start += ber_value_offset if ber_decoder: value =", "else 0 for octet in message[start: stop]: value = value", "1, # Application (True, False): 2, # Context (True, True):", "response (2, 3): decode_octet_string # } BIND_RESPONSE_CONTEXT = { 7:", "= { # Universal (0, 1): decode_boolean, # Boolean (0,", "Referral } CONTROLS_CONTEXT = { 0: decode_sequence # Control }", "9): decode_sequence, # Add response (1, 11): decode_sequence, # Delete", "ber_len, context_decoders) # call value decode function else: # try:", "= octet & 0b00011111 ber_decoder = DECODERS[(ber_class, octet & 0b00011111)]", "form. Highest bit of byte 1 is 1, last 7", "value offset \"\"\" if data[1] <= 127: # BER definite", "# Search result entry (1, 5): decode_sequence, # Search result", "start, stop, context_decoders=None): decoded = [] while start < stop:", "distributed in the hope that it will be useful, #", "None ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10])) start +=", "+ ber_len) # call value decode function for context class", "License, or # (at your option) any later version. 
#", "patching # a fast BER decoder for LDAP responses only", "= message[start] value = -1 if get_byte(first) & 0x80 else", "Application (True, False): 2, # Context (True, True): 3} #", "value 0xFF for TRUE # THIS IS NOT PART OF", "Boolean from copy import deepcopy class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode = False", "+ 10])) start += ber_value_offset if ber_decoder: value = ber_decoder(message,", "context_decoders[ber_type](message, start, start + ber_len) # call value decode function", "ber_type = octet & 0b00011111 ber_decoder = DECODERS[(ber_class, octet &", "result # value = decode_sequence(message, start, start + ber_len) #", "3} # Private # Monkeypatching of pyasn1 for encoding Boolean", "first = message[start] value = -1 if get_byte(first) & 0x80", "ber_type == 3: # Referral in result # value =", "start + ber_len) # else: # raise # re-raise, should", "start, stop, context_decoders=None): return message[start: stop] def decode_boolean(message, start, stop,", "later version. 
# # ldap3 is distributed in the hope", "import ints2octs _true = ints2octs((255,)) _false = ints2octs((0,)) def encodeValue(self,", "and (255,) or (0,), False, False elif pyasn1_version <= '0.3.4':", "\"\"\" Compute size according to BER definite length rules Returns", "'protocolOp': decoded[1][2], 'payload': decoded[1][3], 'controls': decoded[2][3] if len(decoded) == 3", "start, stop, context_decoders=None): return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT) def decode_intermediate_response(message,", "License as published # by the Free Software Foundation, either", "def compute_ber_size(data): \"\"\" Compute size according to BER definite length", "** cont) return value_length, bytes_length + 2 def decode_message_fast(message): ber_len,", "None } def decode_sequence(message, start, stop, context_decoders=None): decoded = []", "# value = decode_sequence(message, start, start + ber_len) # else:", "encodeValue(self, encodeFun, value, defMode, maxChunkSize): return value and self._true or", "if pyasn1_version == 'xxx0.2.3': from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode", "return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT) def decode_extended_response(message, start, stop, context_decoders=None):", "Value } INTERMEDIATE_RESPONSE_CONTEXT = { 0: decode_octet_string, # IntermediateResponseName 1:", "pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode from pyasn1.type.univ import Boolean from", "context_decoders=None): decoded = [] while start < stop: octet =", "useful, # but WITHOUT ANY WARRANTY; without even the implied", "Enumerated (0, 16): decode_sequence, # Sequence (0, 17): decode_sequence, #", "False, False elif pyasn1_version <= '0.3.4': def encodeValue(self, encodeFun, value,", "decode_boolean, # Boolean (0, 2): decode_integer, # Integer (0, 4):", "None return response_dict ###### if str is not bytes: #", "return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT) def 
decode_intermediate_response(message, start, stop, context_decoders=None):", "you can redistribute it and/or modify # it under the", "# Sequence (0, 17): decode_sequence, # Set # Application (1,", "_false = ints2octs((0,)) def encodeValue(self, encodeFun, value, defMode, maxChunkSize): return", "KeyError: # if ber_type == 3: # Referral in result", "entry (1, 5): decode_sequence, # Search result done (1, 7):", "if len(decoded) == 3 else None } def decode_sequence(message, start,", "decoded def decode_integer(message, start, stop, context_decoders=None): first = message[start] value", "Lesser General Public License as published # by the Free", "files. # If not, see <http://www.gnu.org/licenses/>. from pyasn1 import __version__", "# You should have received a copy of the GNU", "decode_extended_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT) def", "while start < stop: octet = get_byte(message[start]) ber_class = CLASSES[(bool(octet", "= 0 cont = bytes_length for byte in data[2: 2", "import ints2octs class BooleanCEREncoder(BooleanEncoder): _true = ints2octs((255,)) tagMap[Boolean.tagSet] = BooleanCEREncoder()", "the terms of the GNU Lesser General Public License as", "try: value = context_decoders[ber_type](message, start, start + ber_len) # call", "Lesser General Public License for more details. 
# # You", "<NAME> # # Copyright 2015 - 2018 <NAME> # #", "get_byte(octet) return value def decode_octet_string(message, start, stop, context_decoders=None): return message[start:", "message[start: stop] def decode_boolean(message, start, stop, context_decoders=None): return False if", "decode_intermediate_response, # intermediate response (2, 3): decode_octet_string # } BIND_RESPONSE_CONTEXT", "get_bytes(x): return bytearray(x) DECODERS = { # Universal (0, 1):", "== 'xxx0.2.3': from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode from pyasn1.type.univ", "= data[1] - 128 value_length = 0 cont = bytes_length", "from pyasn1.type.univ import Boolean from copy import deepcopy class LDAPBooleanEncoder(AbstractItemEncoder):", "(255,) or (0,), False, False customTagMap = deepcopy(tagMap) customTypeMap =", "BER decoder for LDAP responses only def compute_ber_size(data): \"\"\" Compute", "# diagnosticMessage if len(response) == 4: response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True)", "for octet in message[start: stop]: value = value << 8", "= None return response_dict ###### if str is not bytes:", "get_bytes(x): return x else: # Python 2 def get_byte(x): return", "(False, True): 1, # Application (True, False): 2, # Context", "THE FAST BER DECODER if pyasn1_version == 'xxx0.2.3': from pyasn1.codec.ber.encoder", "DECODERS = { # Universal (0, 1): decode_boolean, # Boolean", "except KeyError: # if ber_type == 3: # Referral in", "is 1, last 7 bits counts the number of following", "return ord(x) def get_bytes(x): return bytearray(x) DECODERS = { #", "Value can be up to 127 bytes long return data[1],", "stop, CONTROLS_CONTEXT) def ldap_result_to_dict_fast(response): response_dict = dict() response_dict['result'] = int(response[0][3])", "# matchedDN response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage if len(response)", "definite length rules Returns size of value and value offset", "& 
0b01000000))] ber_constructed = bool(octet & 0b00100000) ber_type = octet", "value_length, bytes_length + 2 def decode_message_fast(message): ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10]))", "2 def decode_message_fast(message): ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start", "(0,), False, False elif pyasn1_version <= '0.3.7': def encodeValue(self, value,", "value = -1 if get_byte(first) & 0x80 else 0 for", "3: # Referral in result # value = decode_sequence(message, start,", "Set # Application (1, 1): decode_bind_response, # Bind response (1,", "start, stop, BIND_RESPONSE_CONTEXT) def decode_extended_response(message, start, stop, context_decoders=None): return decode_sequence(message,", "a copy of the GNU Lesser General Public License #", "Boolean (0, 2): decode_integer, # Integer (0, 4): decode_octet_string, #", "decode_sequence, # Search result entry (1, 5): decode_sequence, # Search", "pyasn1_version == 'xxx0.2.3': from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode from", "DECODERS[(ber_class, octet & 0b00011111)] if ber_class < 2 else None", "if ber_type == 3: # Referral in result # value", "+ ber_len) # else: # raise # re-raise, should never", "= value << 8 | get_byte(octet) return value def decode_octet_string(message,", "3 else None } def decode_sequence(message, start, stop, context_decoders=None): decoded", "& 0b00011111 ber_decoder = DECODERS[(ber_class, octet & 0b00011111)] if ber_class", "\"\"\" \"\"\" # Created on 2015.08.19 # # Author: <NAME>", "{(False, False): 0, # Universal (False, True): 1, # Application", "to_unicode from ..protocol.convert import referrals_to_list CLASSES = {(False, False): 0,", "the value 0xFF for TRUE # THIS IS NOT PART", "pyasn1.compat.octets import ints2octs class BooleanCEREncoder(BooleanEncoder): _true = ints2octs((255,)) tagMap[Boolean.tagSet] =", "0 else True def decode_bind_response(message, start, stop, context_decoders=None): return 
decode_sequence(message,", "Private # Monkeypatching of pyasn1 for encoding Boolean with the", "value << 8 | get_byte(octet) return value def decode_octet_string(message, start,", "3 def get_byte(x): return x def get_bytes(x): return x else:", "of ldap3. # # ldap3 is free software: you can", "of following octets containing the value length bytes_length = data[1]", "'xxx0.2.3': from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode from pyasn1.type.univ import", "+ ber_len, context_decoders) # call value decode function else: #", "return value and (255,) or (0,), False, False customTagMap =", "decode_sequence, # Modify response (1, 9): decode_sequence, # Add response", "Highest bit of byte 1 is 0, message length is", "COPYING.LESSER files. # If not, see <http://www.gnu.org/licenses/>. from pyasn1 import", "len(decoded) == 3 else None } def decode_sequence(message, start, stop,", "..protocol.convert import referrals_to_list CLASSES = {(False, False): 0, # Universal", "encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False): return value and (255,)", "is 0, message length is in the last 7 bits", "is not bytes: # Python 3 def get_byte(x): return x", "value = decode_sequence(message, start, start + ber_len) # else: #", "# Application (1, 1): decode_bind_response, # Bind response (1, 4):", "pyasn1_version <= '0.3.7': def encodeValue(self, value, encodeFun, **options): return value", "<= '0.2.3': from pyasn1.compat.octets import ints2octs _true = ints2octs((255,)) _false", "bytes: # Python 3 def get_byte(x): return x def get_bytes(x):", "and (255,) or (0,), False, False elif pyasn1_version <= '0.3.7':", "ber_value_offset if ber_decoder: value = ber_decoder(message, start, start + ber_len,", "containing the value length bytes_length = data[1] - 128 value_length", "end of monkey patching # a fast BER decoder for", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See", "0b01000000))] ber_constructed = bool(octet & 0b00100000) ber_type = octet &", "return data[1], 2 else: # BER definite length - long", "def ldap_result_to_dict_fast(response): response_dict = dict() response_dict['result'] = int(response[0][3]) # resultCode", "# Set # Application (1, 1): decode_bind_response, # Bind response", "7 bits - Value can be up to 127 bytes", "3: decode_sequence # Referral } CONTROLS_CONTEXT = { 0: decode_sequence", "call value decode function for context class # except KeyError:", "ber_len) # else: # raise # re-raise, should never happen", "as published # by the Free Software Foundation, either version", "it under the terms of the GNU Lesser General Public", "supportIndefLenMode = False if pyasn1_version <= '0.2.3': from pyasn1.compat.octets import", "stop, context_decoders=None): return False if message[start: stop] == 0 else", "should never happen decoded.append((ber_class, ber_constructed, ber_type, value)) start += ber_len", "import Encoder # for monkeypatching of boolean value from ..core.results", "encodeValue(self, value, encodeFun, **options): return value and (255,) or (0,),", "if message[start: stop] == 0 else True def decode_bind_response(message, start,", "from_server=True) # matchedDN response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage if", "str is not bytes: # Python 3 def get_byte(x): return", "decode_bind_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT) def", "'0.3.4': def encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False): return value", "{ 7: decode_octet_string # SaslCredentials } EXTENDED_RESPONSE_CONTEXT = { 10:", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "Encoder # for monkeypatching of boolean value from ..core.results import", "0b00100000) ber_type = octet & 0b00011111 ber_decoder = DECODERS[(ber_class, octet", "get_byte(message[start]) ber_class = 
CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))] ber_constructed", "cont -= 1 value_length += byte * (256 ** cont)", "start, start + ber_len, context_decoders) # call value decode function", "value, defMode, maxChunkSize): return value and self._true or self._false, 0", "0b00011111)] if ber_class < 2 else None ber_len, ber_value_offset =", "of sequence, at maximum 3 bytes for length decoded =", "Public License for more details. # # You should have", "\"\"\" if data[1] <= 127: # BER definite length -", "1 value_length += byte * (256 ** cont) return value_length,", "return decoded def decode_integer(message, start, stop, context_decoders=None): first = message[start]", "BooleanCEREncoder() else: from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder from pyasn1.type.univ", "for usage in other modules from pyasn1.codec.ber.encoder import Encoder #", "stop, context_decoders=None): return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT) def decode_extended_response(message, start,", "(1, 1): decode_bind_response, # Bind response (1, 4): decode_sequence, #", "pyasn1 import __version__ as pyasn1_version from pyasn1.codec.ber import decoder #", "# a fast BER decoder for LDAP responses only def", "message[start] value = -1 if get_byte(first) & 0x80 else 0", "decode_intermediate_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT) def", "1, last 7 bits counts the number of following octets", "resultCode response_dict['description'] = RESULT_CODES[response_dict['result']] response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN", "context_decoders=None): return False if message[start: stop] == 0 else True", "DECODER if pyasn1_version == 'xxx0.2.3': from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder,", "(True, True): 3} # Private # Monkeypatching of pyasn1 for", "(1, 13): decode_sequence, # ModifyDN response (1, 15): 
decode_sequence, #", "Compare response (1, 19): decode_sequence, # Search result reference (1,", "stop, context_decoders=None): first = message[start] value = -1 if get_byte(first)", "FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser", "other modules from pyasn1.codec.ber.encoder import Encoder # for monkeypatching of", "definite length - long form. Highest bit of byte 1", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #", "Referral in result # value = decode_sequence(message, start, start +", "received a copy of the GNU Lesser General Public License", "Software Foundation, either version 3 of the License, or #", "def decode_boolean(message, start, stop, context_decoders=None): return False if message[start: stop]", "deepcopy class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode = False if pyasn1_version <= '0.2.3':", "GNU Lesser General Public License for more details. # #", "encode = Encoder(customTagMap, customTypeMap) # end of monkey patching #", "+= byte * (256 ** cont) return value_length, bytes_length +", "context_decoders=None): return message[start: stop] def decode_boolean(message, start, stop, context_decoders=None): return", "(1, 19): decode_sequence, # Search result reference (1, 24): decode_extended_response,", "2015.08.19 # # Author: <NAME> # # Copyright 2015 -", "tagMap[Boolean.tagSet] = BooleanCEREncoder() else: from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder", "response_dict['description'] = RESULT_CODES[response_dict['result']] response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN response_dict['message']", "'0.3.7': def encodeValue(self, value, encodeFun, **options): return value and (255,)", "compute_ber_size(data): \"\"\" Compute size according to BER definite length rules", "} BIND_RESPONSE_CONTEXT = { 7: decode_octet_string # SaslCredentials } EXTENDED_RESPONSE_CONTEXT", "General Public License # along with ldap3 in the COPYING", "LDAP_MESSAGE_CONTEXT = { 0: 
decode_controls, # Controls 3: decode_sequence #", "start += ber_len return decoded def decode_integer(message, start, stop, context_decoders=None):", "modules from pyasn1.codec.ber.encoder import Encoder # for monkeypatching of boolean", "copy import deepcopy class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode = False if pyasn1_version", "length rules Returns size of value and value offset \"\"\"", "else: # BER definite length - long form. Highest bit", "False elif pyasn1_version <= '0.3.4': def encodeValue(self, encodeFun, value, defMode,", "* (256 ** cont) return value_length, bytes_length + 2 def", "0x80 else 0 for octet in message[start: stop]: value =", "the # GNU Lesser General Public License for more details.", "encodeValue(self, value, asn1Spec, encodeFun, **options): return value and (255,) or", "start + ber_len) # call value decode function for context", "following octets containing the value length bytes_length = data[1] -", "11): decode_sequence, # Delete response (1, 13): decode_sequence, # ModifyDN", "version 3 of the License, or # (at your option)", "= compute_ber_size(get_bytes(message[:10])) # get start of sequence, at maximum 3", "1): decode_boolean, # Boolean (0, 2): decode_integer, # Integer (0,", "# BER definite length - short form. Highest bit of", "Highest bit of byte 1 is 1, last 7 bits", "at maximum 3 bytes for length decoded = decode_sequence(message, ber_value_offset,", "8 | get_byte(octet) return value def decode_octet_string(message, start, stop, context_decoders=None):", "octet = get_byte(message[start]) ber_class = CLASSES[(bool(octet & 0b10000000), bool(octet &", "counts the number of following octets containing the value length", "ldap3 in the COPYING and COPYING.LESSER files. # If not,", "2018 <NAME> # # This file is part of ldap3.", "is part of ldap3. 
# # ldap3 is free software:", "# Created on 2015.08.19 # # Author: <NAME> # #", "import deepcopy class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode = False if pyasn1_version <=", "up to 127 bytes long return data[1], 2 else: #", "(1, 4): decode_sequence, # Search result entry (1, 5): decode_sequence,", "# Referral } CONTROLS_CONTEXT = { 0: decode_sequence # Control", "value decode function else: # try: value = context_decoders[ber_type](message, start,", "x def get_bytes(x): return x else: # Python 2 def", "from_server=True) # diagnosticMessage if len(response) == 4: response_dict['referrals'] = referrals_to_list([to_unicode(referral[3],", "decode_controls, # Controls 3: decode_sequence # Referral } CONTROLS_CONTEXT =", "bit of byte 1 is 0, message length is in", "= referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]]) # referrals else:", "- long form. Highest bit of byte 1 is 1,", "result entry (1, 5): decode_sequence, # Search result done (1,", "defMode, maxChunkSize, ifNotEmpty=False): return value and (255,) or (0,), False,", "ldap3 is free software: you can redistribute it and/or modify", "ldap3. 
# # ldap3 is free software: you can redistribute", "by the Free Software Foundation, either version 3 of the", "<= '0.3.4': def encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False): return", "7 bits counts the number of following octets containing the", "for referral in response[3][3]]) # referrals else: response_dict['referrals'] = None", "Add response (1, 11): decode_sequence, # Delete response (1, 13):", "value and (255,) or (0,), False, False elif pyasn1_version <=", "11: decode_octet_string # Response Value } INTERMEDIATE_RESPONSE_CONTEXT = { 0:", "# # ldap3 is free software: you can redistribute it", "# ldap3 is free software: you can redistribute it and/or", "decode_sequence, # Compare response (1, 19): decode_sequence, # Search result", "import tagMap, BooleanEncoder, encode from pyasn1.type.univ import Boolean from pyasn1.compat.octets", "Modify response (1, 9): decode_sequence, # Add response (1, 11):", "cont) return value_length, bytes_length + 2 def decode_message_fast(message): ber_len, ber_value_offset", "Lesser General Public License # along with ldap3 in the", "= -1 if get_byte(first) & 0x80 else 0 for octet", "def encodeValue(self, value, asn1Spec, encodeFun, **options): return value and (255,)", "offset \"\"\" if data[1] <= 127: # BER definite length", "= ber_decoder(message, start, start + ber_len, context_decoders) # call value", "start, stop, context_decoders=None): return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message,", "customTagMap[Boolean.tagSet] = LDAPBooleanEncoder() customTypeMap[Boolean.typeId] = LDAPBooleanEncoder() encode = Encoder(customTagMap, customTypeMap)", "response (1, 9): decode_sequence, # Add response (1, 11): decode_sequence,", "# resultCode response_dict['description'] = RESULT_CODES[response_dict['result']] response_dict['dn'] = to_unicode(response[1][3], from_server=True) #", "from ..protocol.convert import referrals_to_list CLASSES = {(False, 
False): 0, #", "stop, INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message, start, stop, context_decoders=None): return decode_sequence(message, start,", "x else: # Python 2 def get_byte(x): return ord(x) def", "import Boolean from copy import deepcopy class LDAPBooleanEncoder(AbstractItemEncoder): supportIndefLenMode =", "decode_message_fast(message): ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start of sequence,", "< stop: octet = get_byte(message[start]) ber_class = CLASSES[(bool(octet & 0b10000000),", "in the hope that it will be useful, # but", "ber_decoder: value = ber_decoder(message, start, start + ber_len, context_decoders) #", "(0, 17): decode_sequence, # Set # Application (1, 1): decode_bind_response,", "start, stop, context_decoders=None): return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT) def decode_extended_response(message,", "modify # it under the terms of the GNU Lesser", "ord(x) def get_bytes(x): return bytearray(x) DECODERS = { # Universal", "or self._false, 0 elif pyasn1_version <= '0.3.1': def encodeValue(self, encodeFun,", "1: decode_octet_string # IntermediateResponseValue } LDAP_MESSAGE_CONTEXT = { 0: decode_controls,", "if ber_decoder: value = ber_decoder(message, start, start + ber_len, context_decoders)", "= deepcopy(tagMap) customTypeMap = deepcopy(typeMap) customTagMap[Boolean.tagSet] = LDAPBooleanEncoder() customTypeMap[Boolean.typeId] =", "decoder for LDAP responses only def compute_ber_size(data): \"\"\" Compute size", "1 is 0, message length is in the last 7", "value)) start += ber_len return decoded def decode_integer(message, start, stop,", "return x def get_bytes(x): return x else: # Python 2", "'controls': decoded[2][3] if len(decoded) == 3 else None } def", "BIND_RESPONSE_CONTEXT) def decode_extended_response(message, start, stop, context_decoders=None): return decode_sequence(message, start, stop,", "and value offset \"\"\" if data[1] <= 127: # BER", "short form. 
Highest bit of byte 1 is 0, message", "# # This file is part of ldap3. # #", "import referrals_to_list CLASSES = {(False, False): 0, # Universal (False,", "IntermediateResponseValue } LDAP_MESSAGE_CONTEXT = { 0: decode_controls, # Controls 3:", "customTypeMap = deepcopy(typeMap) customTagMap[Boolean.tagSet] = LDAPBooleanEncoder() customTypeMap[Boolean.typeId] = LDAPBooleanEncoder() encode", "decode_sequence, # Delete response (1, 13): decode_sequence, # ModifyDN response", "TRUE # THIS IS NOT PART OF THE FAST BER", "decode_integer(message, start, stop, context_decoders=None): first = message[start] value = -1", "2, # Context (True, True): 3} # Private # Monkeypatching", "decode_sequence, # Search result done (1, 7): decode_sequence, # Modify", "stop, context_decoders=None): return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT) def decode_controls(message, start,", "result done (1, 7): decode_sequence, # Modify response (1, 9):", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "# along with ldap3 in the COPYING and COPYING.LESSER files.", "last 7 bits - Value can be up to 127", "(0, 16): decode_sequence, # Sequence (0, 17): decode_sequence, # Set", "compute_ber_size(get_bytes(message[:10])) # get start of sequence, at maximum 3 bytes", "Search result reference (1, 24): decode_extended_response, # Extended response (1,", "byte 1 is 1, last 7 bits counts the number", "ber_class = CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))] ber_constructed =", "CLASSES = {(False, False): 0, # Universal (False, True): 1,", "call value decode function else: # try: value = context_decoders[ber_type](message,", "} EXTENDED_RESPONSE_CONTEXT = { 10: decode_octet_string, # ResponseName 11: decode_octet_string", "to 127 bytes long return data[1], 2 else: # BER", "# Enumerated (0, 16): decode_sequence, # Sequence (0, 17): decode_sequence,", "the COPYING and COPYING.LESSER files. 
# If not, see <http://www.gnu.org/licenses/>.", "BER definite length - long form. Highest bit of byte", "(0, 1): decode_boolean, # Boolean (0, 2): decode_integer, # Integer", "from pyasn1.codec.ber.encoder import Encoder # for monkeypatching of boolean value", "24): decode_extended_response, # Extended response (1, 25): decode_intermediate_response, # intermediate", "is distributed in the hope that it will be useful,", "- 2018 <NAME> # # This file is part of", "3 of the License, or # (at your option) any", "stop: octet = get_byte(message[start]) ber_class = CLASSES[(bool(octet & 0b10000000), bool(octet", "matchedDN response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage if len(response) ==", "from pyasn1.codec.ber import decoder # for usage in other modules", "referrals_to_list CLASSES = {(False, False): 0, # Universal (False, True):", "# # You should have received a copy of the", "# Octet String (0, 10): decode_integer, # Enumerated (0, 16):", "length decoded = decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT) return", "option) any later version. # # ldap3 is distributed in", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False): return value and (255,) or", "response (1, 13): decode_sequence, # ModifyDN response (1, 15): decode_sequence,", "value from ..core.results import RESULT_CODES from ..utils.conv import to_unicode from", "response (1, 25): decode_intermediate_response, # intermediate response (2, 3): decode_octet_string", "without even the implied warranty of # MERCHANTABILITY or FITNESS", "<= 127: # BER definite length - short form. Highest", "bytes long return data[1], 2 else: # BER definite length" ]
[ "nl=tf.nn.relu, out_channel=32): x = Conv2D('conv0', x) x = Conv2D('conv1', x)", "dict of dict: ``dict[layername] = {arg: val}`` \"\"\" if len(_ArgScopeStack)", "collections import defaultdict import tensorflow as tf _ArgScopeStack = []", "val}`` \"\"\" if len(_ArgScopeStack) > 0: return _ArgScopeStack[-1] else: return", "= copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance = original_init(self, *args, **actual_args) return instance", "the function ``func`` returns multiple input or output tensors, only", "return defaultdict(dict) def add_arg_scope(cls): \"\"\"Decorator for function to support argscope", "l in layers: assert hasattr(l, '__arg_scope_enabled__'), \"Argscope not supported for", "layers: assert hasattr(l, '__arg_scope_enabled__'), \"Argscope not supported for {}\".format(l) #", "outer scope new_scope = copy.deepcopy(get_arg_scope()) for l in layers: new_scope[l.__name__].update(kwargs)", "layers (list or layer): layer or list of layers to", "\"\"\" Args: layers (list or layer): layer or list of", "functools import wraps from collections import defaultdict import tensorflow as", "where all appearance of these layer will by default have", "of dict: ``dict[layername] = {arg: val}`` \"\"\" if len(_ArgScopeStack) >", "_ArgScopeStack[-1] def get_arg_scope(): \"\"\" Returns: dict: the current argscope. An", "from contextlib import contextmanager from functools import wraps from collections", "arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32): x = Conv2D('conv0', x) x =", "for function to support argscope Example: .. code-block:: python from", "only the first input/output tensor shape is displayed during logging.", "original_init = cls.__init__ @wraps(original_init) def wrapped_init(self, *args, **kwargs): actual_args =", "the arguments specified by kwargs. Example: .. code-block:: python with", "by kwargs. Example: .. 
code-block:: python with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu,", "override argscope \"\"\" if not isinstance(layers, list): layers = [layers]", "[layers] for l in layers: assert hasattr(l, '__arg_scope_enabled__'), \"Argscope not", "= copy.deepcopy(get_arg_scope()) for l in layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield del", "during logging. Returns: The decorated function. \"\"\" original_init = cls.__init__", "def wrapped_init(self, *args, **kwargs): actual_args = copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance =", "actual_args = copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance = original_init(self, *args, **actual_args) return", "mylib import MyClass myfunc = add_arg_scope(MyClass) Args: func: A function", "Conv2D('conv0', x) x = Conv2D('conv1', x) x = Conv2D('conv2', x,", "_ArgScopeStack[-1] else: return defaultdict(dict) def add_arg_scope(cls): \"\"\"Decorator for function to", "Args: func: A function mapping one or multiple tensors to", "shape is displayed during logging. Returns: The decorated function. \"\"\"", "argscope is a dict of dict: ``dict[layername] = {arg: val}``", "len(_ArgScopeStack) > 0: return _ArgScopeStack[-1] else: return defaultdict(dict) def add_arg_scope(cls):", "dict: the current argscope. An argscope is a dict of", "import MyClass myfunc = add_arg_scope(MyClass) Args: func: A function mapping", "is displayed during logging. Returns: The decorated function. 
\"\"\" original_init", "import defaultdict import tensorflow as tf _ArgScopeStack = [] @contextmanager", "\"\"\" if not isinstance(layers, list): layers = [layers] for l", "MyClass myfunc = add_arg_scope(MyClass) Args: func: A function mapping one", "kernel_shape=3, nl=tf.nn.relu, out_channel=32): x = Conv2D('conv0', x) x = Conv2D('conv1',", "new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield del _ArgScopeStack[-1] def get_arg_scope(): \"\"\" Returns: dict:", "= Conv2D('conv1', x) x = Conv2D('conv2', x, out_channel=64) # override", "function mapping one or multiple tensors to one or multiple", "import copy from contextlib import contextmanager from functools import wraps", "need to deepcopy so that changes to new_scope does not", "return _ArgScopeStack[-1] else: return defaultdict(dict) def add_arg_scope(cls): \"\"\"Decorator for function", "instance = original_init(self, *args, **actual_args) return instance cls.__arg_scope_enabled__ = True", "affect outer scope new_scope = copy.deepcopy(get_arg_scope()) for l in layers:", "l in layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield del _ArgScopeStack[-1] def get_arg_scope():", "def add_arg_scope(cls): \"\"\"Decorator for function to support argscope Example: ..", "to one or multiple tensors. Remarks: If the function ``func``", "from functools import wraps from collections import defaultdict import tensorflow", "output tensors, only the first input/output tensor shape is displayed", "yield del _ArgScopeStack[-1] def get_arg_scope(): \"\"\" Returns: dict: the current", "assert hasattr(l, '__arg_scope_enabled__'), \"Argscope not supported for {}\".format(l) # need", "so that changes to new_scope does not affect outer scope", "for {}\".format(l) # need to deepcopy so that changes to", "input or output tensors, only the first input/output tensor shape", "decorated function. 
\"\"\" original_init = cls.__init__ @wraps(original_init) def wrapped_init(self, *args,", "x = Conv2D('conv1', x) x = Conv2D('conv2', x, out_channel=64) #", "arguments. Returns: a context where all appearance of these layer", "x = Conv2D('conv0', x) x = Conv2D('conv1', x) x =", "= {arg: val}`` \"\"\" if len(_ArgScopeStack) > 0: return _ArgScopeStack[-1]", ".. code-block:: python with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32): x =", "\"\"\" Returns: dict: the current argscope. An argscope is a", "multiple tensors to one or multiple tensors. Remarks: If the", "contextlib import contextmanager from functools import wraps from collections import", "= [] @contextmanager def arg_scope(layers, **kwargs): \"\"\" Args: layers (list", "of these layer will by default have the arguments specified", "tensors to one or multiple tensors. Remarks: If the function", "first input/output tensor shape is displayed during logging. Returns: The", "tensor shape is displayed during logging. Returns: The decorated function.", "wrapped_init(self, *args, **kwargs): actual_args = copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance = original_init(self,", "python from mylib import MyClass myfunc = add_arg_scope(MyClass) Args: func:", "tensors, only the first input/output tensor shape is displayed during", "= Conv2D('conv2', x, out_channel=64) # override argscope \"\"\" if not", "# override argscope \"\"\" if not isinstance(layers, list): layers =", "multiple tensors. Remarks: If the function ``func`` returns multiple input", ".. code-block:: python from mylib import MyClass myfunc = add_arg_scope(MyClass)", "of layers to apply the arguments. Returns: a context where", "does not affect outer scope new_scope = copy.deepcopy(get_arg_scope()) for l", "Returns: The decorated function. \"\"\" original_init = cls.__init__ @wraps(original_init) def", "layers to apply the arguments. 
Returns: a context where all", "all appearance of these layer will by default have the", "out_channel=32): x = Conv2D('conv0', x) x = Conv2D('conv1', x) x", "or multiple tensors. Remarks: If the function ``func`` returns multiple", "to apply the arguments. Returns: a context where all appearance", "\"\"\" original_init = cls.__init__ @wraps(original_init) def wrapped_init(self, *args, **kwargs): actual_args", "cls.__init__ @wraps(original_init) def wrapped_init(self, *args, **kwargs): actual_args = copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs)", "x = Conv2D('conv2', x, out_channel=64) # override argscope \"\"\" if", "**kwargs): actual_args = copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance = original_init(self, *args, **actual_args)", "list): layers = [layers] for l in layers: assert hasattr(l,", "\"\"\" if len(_ArgScopeStack) > 0: return _ArgScopeStack[-1] else: return defaultdict(dict)", "= cls.__init__ @wraps(original_init) def wrapped_init(self, *args, **kwargs): actual_args = copy.copy(get_arg_scope()[cls.__name__])", "for l in layers: assert hasattr(l, '__arg_scope_enabled__'), \"Argscope not supported", "copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance = original_init(self, *args, **actual_args) return instance cls.__arg_scope_enabled__", "deepcopy so that changes to new_scope does not affect outer", "not supported for {}\".format(l) # need to deepcopy so that", "(list or layer): layer or list of layers to apply", "new_scope = copy.deepcopy(get_arg_scope()) for l in layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield", "x) x = Conv2D('conv2', x, out_channel=64) # override argscope \"\"\"", "Returns: dict: the current argscope. An argscope is a dict", "argscope. An argscope is a dict of dict: ``dict[layername] =", "function. \"\"\" original_init = cls.__init__ @wraps(original_init) def wrapped_init(self, *args, **kwargs):", "specified by kwargs. 
Example: .. code-block:: python with arg_scope(Conv2D, kernel_shape=3,", "= [layers] for l in layers: assert hasattr(l, '__arg_scope_enabled__'), \"Argscope", "kwargs. Example: .. code-block:: python with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):", "to new_scope does not affect outer scope new_scope = copy.deepcopy(get_arg_scope())", "support argscope Example: .. code-block:: python from mylib import MyClass", "**actual_args) return instance cls.__arg_scope_enabled__ = True cls.__init__ = wrapped_init return", "for l in layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield del _ArgScopeStack[-1] def", "displayed during logging. Returns: The decorated function. \"\"\" original_init =", "list of layers to apply the arguments. Returns: a context", "defaultdict import tensorflow as tf _ArgScopeStack = [] @contextmanager def", "def get_arg_scope(): \"\"\" Returns: dict: the current argscope. An argscope", "from mylib import MyClass myfunc = add_arg_scope(MyClass) Args: func: A", "\"Argscope not supported for {}\".format(l) # need to deepcopy so", "not affect outer scope new_scope = copy.deepcopy(get_arg_scope()) for l in", "to support argscope Example: .. code-block:: python from mylib import", "layer or list of layers to apply the arguments. Returns:", "layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield del _ArgScopeStack[-1] def get_arg_scope(): \"\"\" Returns:", "current argscope. An argscope is a dict of dict: ``dict[layername]", "code-block:: python with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32): x = Conv2D('conv0',", "tensors. Remarks: If the function ``func`` returns multiple input or", "one or multiple tensors. 
Remarks: If the function ``func`` returns", "**kwargs): \"\"\" Args: layers (list or layer): layer or list", "Args: layers (list or layer): layer or list of layers", "in layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield del _ArgScopeStack[-1] def get_arg_scope(): \"\"\"", "actual_args.update(kwargs) instance = original_init(self, *args, **actual_args) return instance cls.__arg_scope_enabled__ =", "arguments specified by kwargs. Example: .. code-block:: python with arg_scope(Conv2D,", "'__arg_scope_enabled__'), \"Argscope not supported for {}\".format(l) # need to deepcopy", "An argscope is a dict of dict: ``dict[layername] = {arg:", "layer will by default have the arguments specified by kwargs.", "as tf _ArgScopeStack = [] @contextmanager def arg_scope(layers, **kwargs): \"\"\"", "def arg_scope(layers, **kwargs): \"\"\" Args: layers (list or layer): layer", "have the arguments specified by kwargs. Example: .. code-block:: python", "The decorated function. \"\"\" original_init = cls.__init__ @wraps(original_init) def wrapped_init(self,", "with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32): x = Conv2D('conv0', x) x", "Example: .. code-block:: python from mylib import MyClass myfunc =", "if not isinstance(layers, list): layers = [layers] for l in", "or list of layers to apply the arguments. Returns: a", "@wraps(original_init) def wrapped_init(self, *args, **kwargs): actual_args = copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance", "``func`` returns multiple input or output tensors, only the first", "hasattr(l, '__arg_scope_enabled__'), \"Argscope not supported for {}\".format(l) # need to", "\"\"\"Decorator for function to support argscope Example: .. 
code-block:: python", "a context where all appearance of these layer will by", "appearance of these layer will by default have the arguments", "``dict[layername] = {arg: val}`` \"\"\" if len(_ArgScopeStack) > 0: return", "out_channel=64) # override argscope \"\"\" if not isinstance(layers, list): layers", "add_arg_scope(cls): \"\"\"Decorator for function to support argscope Example: .. code-block::", "argscope \"\"\" if not isinstance(layers, list): layers = [layers] for", "defaultdict(dict) def add_arg_scope(cls): \"\"\"Decorator for function to support argscope Example:", "or multiple tensors to one or multiple tensors. Remarks: If", "copy from contextlib import contextmanager from functools import wraps from", "layer): layer or list of layers to apply the arguments.", "Remarks: If the function ``func`` returns multiple input or output", "*args, **kwargs): actual_args = copy.copy(get_arg_scope()[cls.__name__]) actual_args.update(kwargs) instance = original_init(self, *args,", "{}\".format(l) # need to deepcopy so that changes to new_scope", "the first input/output tensor shape is displayed during logging. Returns:", "a dict of dict: ``dict[layername] = {arg: val}`` \"\"\" if", "these layer will by default have the arguments specified by", "Conv2D('conv1', x) x = Conv2D('conv2', x, out_channel=64) # override argscope", "else: return defaultdict(dict) def add_arg_scope(cls): \"\"\"Decorator for function to support", "Example: .. 
code-block:: python with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32): x", "Conv2D('conv2', x, out_channel=64) # override argscope \"\"\" if not isinstance(layers,", "that changes to new_scope does not affect outer scope new_scope", "mapping one or multiple tensors to one or multiple tensors.", "scope new_scope = copy.deepcopy(get_arg_scope()) for l in layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope)", "[] @contextmanager def arg_scope(layers, **kwargs): \"\"\" Args: layers (list or", "code-block:: python from mylib import MyClass myfunc = add_arg_scope(MyClass) Args:", "# need to deepcopy so that changes to new_scope does", "import wraps from collections import defaultdict import tensorflow as tf", "x, out_channel=64) # override argscope \"\"\" if not isinstance(layers, list):", "> 0: return _ArgScopeStack[-1] else: return defaultdict(dict) def add_arg_scope(cls): \"\"\"Decorator", "*args, **actual_args) return instance cls.__arg_scope_enabled__ = True cls.__init__ = wrapped_init", "x) x = Conv2D('conv1', x) x = Conv2D('conv2', x, out_channel=64)", "tensorflow as tf _ArgScopeStack = [] @contextmanager def arg_scope(layers, **kwargs):", "new_scope does not affect outer scope new_scope = copy.deepcopy(get_arg_scope()) for", "copy.deepcopy(get_arg_scope()) for l in layers: new_scope[l.__name__].update(kwargs) _ArgScopeStack.append(new_scope) yield del _ArgScopeStack[-1]", "changes to new_scope does not affect outer scope new_scope =", "in layers: assert hasattr(l, '__arg_scope_enabled__'), \"Argscope not supported for {}\".format(l)", "get_arg_scope(): \"\"\" Returns: dict: the current argscope. An argscope is", "from collections import defaultdict import tensorflow as tf _ArgScopeStack =", "by default have the arguments specified by kwargs. 
Example: ..", "layers = [layers] for l in layers: assert hasattr(l, '__arg_scope_enabled__'),", "_ArgScopeStack.append(new_scope) yield del _ArgScopeStack[-1] def get_arg_scope(): \"\"\" Returns: dict: the", "python with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32): x = Conv2D('conv0', x)", "import contextmanager from functools import wraps from collections import defaultdict", "is a dict of dict: ``dict[layername] = {arg: val}`` \"\"\"", "dict: ``dict[layername] = {arg: val}`` \"\"\" if len(_ArgScopeStack) > 0:", "returns multiple input or output tensors, only the first input/output", "contextmanager from functools import wraps from collections import defaultdict import", "the arguments. Returns: a context where all appearance of these", "0: return _ArgScopeStack[-1] else: return defaultdict(dict) def add_arg_scope(cls): \"\"\"Decorator for", "default have the arguments specified by kwargs. Example: .. code-block::", "If the function ``func`` returns multiple input or output tensors,", "if len(_ArgScopeStack) > 0: return _ArgScopeStack[-1] else: return defaultdict(dict) def", "add_arg_scope(MyClass) Args: func: A function mapping one or multiple tensors", "the current argscope. An argscope is a dict of dict:", "A function mapping one or multiple tensors to one or", "logging. Returns: The decorated function. 
\"\"\" original_init = cls.__init__ @wraps(original_init)", "function ``func`` returns multiple input or output tensors, only the", "_ArgScopeStack = [] @contextmanager def arg_scope(layers, **kwargs): \"\"\" Args: layers", "multiple input or output tensors, only the first input/output tensor", "or layer): layer or list of layers to apply the", "context where all appearance of these layer will by default", "= Conv2D('conv0', x) x = Conv2D('conv1', x) x = Conv2D('conv2',", "import tensorflow as tf _ArgScopeStack = [] @contextmanager def arg_scope(layers,", "del _ArgScopeStack[-1] def get_arg_scope(): \"\"\" Returns: dict: the current argscope.", "will by default have the arguments specified by kwargs. Example:", "function to support argscope Example: .. code-block:: python from mylib", "argscope Example: .. code-block:: python from mylib import MyClass myfunc", "to deepcopy so that changes to new_scope does not affect", "= add_arg_scope(MyClass) Args: func: A function mapping one or multiple", "wraps from collections import defaultdict import tensorflow as tf _ArgScopeStack", "isinstance(layers, list): layers = [layers] for l in layers: assert", "apply the arguments. Returns: a context where all appearance of", "one or multiple tensors to one or multiple tensors. Remarks:", "original_init(self, *args, **actual_args) return instance cls.__arg_scope_enabled__ = True cls.__init__ =", "return instance cls.__arg_scope_enabled__ = True cls.__init__ = wrapped_init return cls", "func: A function mapping one or multiple tensors to one", "tf _ArgScopeStack = [] @contextmanager def arg_scope(layers, **kwargs): \"\"\" Args:", "@contextmanager def arg_scope(layers, **kwargs): \"\"\" Args: layers (list or layer):", "not isinstance(layers, list): layers = [layers] for l in layers:", "or output tensors, only the first input/output tensor shape is", "Returns: a context where all appearance of these layer will", "input/output tensor shape is displayed during logging. 
Returns: The decorated", "arg_scope(layers, **kwargs): \"\"\" Args: layers (list or layer): layer or", "{arg: val}`` \"\"\" if len(_ArgScopeStack) > 0: return _ArgScopeStack[-1] else:", "supported for {}\".format(l) # need to deepcopy so that changes", "myfunc = add_arg_scope(MyClass) Args: func: A function mapping one or", "= original_init(self, *args, **actual_args) return instance cls.__arg_scope_enabled__ = True cls.__init__" ]
[ "as f: raw_data = f.read() re_table = textfsm.TextFSM(template) data =", "= open(template_file) with open(\"datafile\") as f: raw_data = f.read() re_table", "a.write(intstatus) a.close() template_file= \"ifconfig-template.template\" template = open(template_file) with open(\"datafile\") as", ": x[0].split(':')[0], 'TX' : int(x[1])+int(random.randint(1,100)) } NL.append(NLD) print(NL) import json", "open(template_file) with open(\"datafile\") as f: raw_data = f.read() re_table =", "subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus = res.stdout.decode('ascii') with open(\"datafile\",\"w+\") as a: a.write(intstatus) a.close()", "import textfsm import subprocess import random res = subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus", "res = subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus = res.stdout.decode('ascii') with open(\"datafile\",\"w+\") as a:", "with open(\"datafile\",\"w+\") as a: a.write(intstatus) a.close() template_file= \"ifconfig-template.template\" template =", "as a: a.write(intstatus) a.close() template_file= \"ifconfig-template.template\" template = open(template_file) with", "re_table = textfsm.TextFSM(template) data = re_table.ParseText(raw_data) print(data) NL = []", "'Interface' : x[0].split(':')[0], 'TX' : int(x[1])+int(random.randint(1,100)) } NL.append(NLD) print(NL) import", "template_file= \"ifconfig-template.template\" template = open(template_file) with open(\"datafile\") as f: raw_data", "template = open(template_file) with open(\"datafile\") as f: raw_data = f.read()", "for x in data: NLD = { 'Interface' : x[0].split(':')[0],", "= res.stdout.decode('ascii') with open(\"datafile\",\"w+\") as a: a.write(intstatus) a.close() template_file= \"ifconfig-template.template\"", "<filename>CORN-TEST/textfsm_parse.py import textfsm import subprocess import random res = subprocess.run('ifconfig',stdout=subprocess.PIPE)", "open(\"datafile\") as f: raw_data = f.read() re_table = textfsm.TextFSM(template) data", 
"textfsm import subprocess import random res = subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus =", "[] for x in data: NLD = { 'Interface' :", "in data: NLD = { 'Interface' : x[0].split(':')[0], 'TX' :", "} NL.append(NLD) print(NL) import json print('#'*12) print(json.dumps(NL)) #Enter template FileName", "data = re_table.ParseText(raw_data) print(data) NL = [] for x in", "= subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus = res.stdout.decode('ascii') with open(\"datafile\",\"w+\") as a: a.write(intstatus)", "open(\"datafile\",\"w+\") as a: a.write(intstatus) a.close() template_file= \"ifconfig-template.template\" template = open(template_file)", "subprocess import random res = subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus = res.stdout.decode('ascii') with", "int(x[1])+int(random.randint(1,100)) } NL.append(NLD) print(NL) import json print('#'*12) print(json.dumps(NL)) #Enter template", "textfsm.TextFSM(template) data = re_table.ParseText(raw_data) print(data) NL = [] for x", "= textfsm.TextFSM(template) data = re_table.ParseText(raw_data) print(data) NL = [] for", "res.stdout.decode('ascii') with open(\"datafile\",\"w+\") as a: a.write(intstatus) a.close() template_file= \"ifconfig-template.template\" template", "re_table.ParseText(raw_data) print(data) NL = [] for x in data: NLD", "NL.append(NLD) print(NL) import json print('#'*12) print(json.dumps(NL)) #Enter template FileName :ifconfig-template.template", "\"ifconfig-template.template\" template = open(template_file) with open(\"datafile\") as f: raw_data =", "raw_data = f.read() re_table = textfsm.TextFSM(template) data = re_table.ParseText(raw_data) print(data)", "random res = subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus = res.stdout.decode('ascii') with open(\"datafile\",\"w+\") as", "NL = [] for x in data: NLD = {", "import subprocess import random res = subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus = res.stdout.decode('ascii')", "{ 
'Interface' : x[0].split(':')[0], 'TX' : int(x[1])+int(random.randint(1,100)) } NL.append(NLD) print(NL)", "x in data: NLD = { 'Interface' : x[0].split(':')[0], 'TX'", "x[0].split(':')[0], 'TX' : int(x[1])+int(random.randint(1,100)) } NL.append(NLD) print(NL) import json print('#'*12)", "print(json.dumps(NL)) #Enter template FileName :ifconfig-template.template #Input Data file : ifconfig_output.txt", "= { 'Interface' : x[0].split(':')[0], 'TX' : int(x[1])+int(random.randint(1,100)) } NL.append(NLD)", "print('#'*12) print(json.dumps(NL)) #Enter template FileName :ifconfig-template.template #Input Data file :", "NLD = { 'Interface' : x[0].split(':')[0], 'TX' : int(x[1])+int(random.randint(1,100)) }", "a: a.write(intstatus) a.close() template_file= \"ifconfig-template.template\" template = open(template_file) with open(\"datafile\")", "= f.read() re_table = textfsm.TextFSM(template) data = re_table.ParseText(raw_data) print(data) NL", "print(NL) import json print('#'*12) print(json.dumps(NL)) #Enter template FileName :ifconfig-template.template #Input", "data: NLD = { 'Interface' : x[0].split(':')[0], 'TX' : int(x[1])+int(random.randint(1,100))", ": int(x[1])+int(random.randint(1,100)) } NL.append(NLD) print(NL) import json print('#'*12) print(json.dumps(NL)) #Enter", "'TX' : int(x[1])+int(random.randint(1,100)) } NL.append(NLD) print(NL) import json print('#'*12) print(json.dumps(NL))", "f.read() re_table = textfsm.TextFSM(template) data = re_table.ParseText(raw_data) print(data) NL =", "import json print('#'*12) print(json.dumps(NL)) #Enter template FileName :ifconfig-template.template #Input Data", "json print('#'*12) print(json.dumps(NL)) #Enter template FileName :ifconfig-template.template #Input Data file", "= [] for x in data: NLD = { 'Interface'", "= re_table.ParseText(raw_data) print(data) NL = [] for x in data:", "import random res = subprocess.run('ifconfig',stdout=subprocess.PIPE) intstatus = res.stdout.decode('ascii') with open(\"datafile\",\"w+\")", 
"print(data) NL = [] for x in data: NLD =", "f: raw_data = f.read() re_table = textfsm.TextFSM(template) data = re_table.ParseText(raw_data)", "intstatus = res.stdout.decode('ascii') with open(\"datafile\",\"w+\") as a: a.write(intstatus) a.close() template_file=", "with open(\"datafile\") as f: raw_data = f.read() re_table = textfsm.TextFSM(template)", "a.close() template_file= \"ifconfig-template.template\" template = open(template_file) with open(\"datafile\") as f:" ]
[ "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32)) if", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x,", "pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'),", "pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')] p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'),", "32, 32)) if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32,", "16, 32, 32)) if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32,", "self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))", "= [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')] p2walkBackshield =", "// 5], (self.rect.x, self.rect.y)) 
self.animationcount += 1 def reset(self): self.spawntimer=0", "self.bomb_type==1: for i in self.walls: if i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft =", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))", "self.rect.y = y self.rect.x = x class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'),", "self.animationcount=0 def draw(self, screen): if self.number==1: if self.exists and self.spawntimer>50:", "screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1 else: if", "self.upcheck = self.rect.y - 32 self.downcheck = self.rect.y + self.height", "self.rect.y = y self.canmove = True self.front = True self.change_x=0", "1 elif self.back: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1backshield.png'),", "== 0: screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'),", "// 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.left: if", "50: if self.animationcount + 1 >= 30: self.animationcount = 0", "self.change_x += x self.change_y += y if self.superspeed and self.change_x==0", "draw(self, screen): if self.bomb_count < 30: if self.bomb_type==0: 
screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x,", "pygame.image.load('Images/p2leftshield.png')] p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')]", "(self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21: self.walkCount", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32)) if", "self.rect.y): self.expright = False if i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright = False", "1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y)) else:", "self.spawntimer > 50: if self.animationcount + 1 >= 30: self.animationcount", "= 0 self.walls = None self.alive = True self.canmove =", "self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16,", "pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft = [pygame.image.load('Images/p1left.png'),", "self.rect.y = y self.rect.x = x self.number = number self.spawntimer=0", "(self.rightcheck+32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), 
(self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16,", "1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y)) else:", "self.rect.x - 32 self.rightcheck = self.rect.x + self.width self.upcheck =", "screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.right:", "pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'),", "0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16,", "p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] #", "self.rect.y)) self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2front.png'),", "(self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 120:", "i.rect.collidepoint(self.rect.x,self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown = False screen.blit(pygame.image.load('Images/explosion.png'),", "== 0: 
screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y)) elif", "pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'),", "(self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16,", "self.rightcheck = self.rect.x + self.width self.upcheck = self.rect.y - 32", "> 0: self.rect.bottom = block.rect.top else: self.rect.top = block.rect.bottom def", "pygame.image.load('Images/shield1.png')] megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def", "else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 120: if self.bomb_type==0:", "pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')] shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')]", "16, 32, 32)) if self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32)) 
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32,", "[] def draw(self, screen): if self.bomb_count < 30: if self.bomb_type==0:", "[pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront = [pygame.image.load('Images/p2front.png'),", "True self.canmove = True self.deathCount = 0 self.gotomenu=False self.speed=3 self.superspeed=False", "0 self.rect.x = x self.rect.y = y self.canmove = True", "block_hit_list = pygame.sprite.spritecollide(self, self.walls, False) for block in block_hit_list: if", "self.upcheck + 16, 32, 32)) if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32))", "self.downcheck+32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+16,", "screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1 elif self.number==2:", "and self.deathCount < 200: screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y)) self.deathCount", "pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'),", "pygame.image.load('Images/p2right.png')] p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'), 
pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')]", "3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if self.change_y ==", "self.doubleexpleft = True self.expright = True self.doubleexpright = True self.expup", "pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] # Constructor function def __init__(self, x,", "= [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation = [pygame.image.load('Images2/megabombicon1.png'),", "self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y)) else: if", "self.gotomenu=False self.speed=3 self.superspeed=False self.superspeedcount=0 self.shield=False self.shieldcount=0 self.megabombs=False self.megabombcount = 0", "death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'),", "True self.front = True self.change_x=0 self.change_y=0 self.superspeed=False self.speed=3 self.shield=False self.megabombs=False", "self.front=False self.back=True elif self.change_y >0: self.left=False self.right=False self.front=True self.back=False block_hit_list", "== 0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "= 0 screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) 
self.walkCount += 1", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFront[self.walkCount // 3],", "if self.alive == False and self.deathCount < 200: screen.blit(self.death[self.deathCount //", "self.gotomenu = False self.alive = True self.deathCount = 0 self.rect.x", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16,", "self.walkCount = 0 screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeftshield[self.walkCount //", "pygame.image.load('Images/p1front.png')] p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')]", "(self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck,", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32)) if", "pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'),", "if self.change_y == 0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "self.rect.y)) self.walkCount += 1 else: if 
self.change_x == 0: screen.blit(pygame.image.load('Images/p2right.png'),", "= [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield =", "else: self.rect.left = block.rect.right self.rect.y += self.change_y if self.change_y <0:", "self.canmove: self.rect.x += self.change_x if self.change_x <0: self.left=True self.right=False self.front=False", "+= 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y))", "elif self.back: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x,", "and self.spawntimer>50: if self.animationcount + 1 >= 30: self.animationcount =", "p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield", "self.doubleexpleft = False if i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright = False if", "1 >= 30: self.animationcount = 0 screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x,", "self.left=False self.right=False self.front=True self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls, False) for", "= False if i.rect.collidepoint(self.rect.x, self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x,", "+= self.change_x if self.change_x <0: self.left=True self.right=False self.front=False self.back=False elif", "= False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) 
self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32)) if", "= False if i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft = False if i.rect.collidepoint(self.rightcheck,", "0 screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif", "+= self.change_y if self.change_y <0: self.left=False self.right=False self.front=False self.back=True elif", "0 screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1 elif", "x, y, number): super().__init__() self.image = pygame.Surface([24, 28]) self.image.fill((0,0,0)) self.rect", "if self.superspeedcount>=150: self.superspeed = False self.speed=3 self.superspeedcount=0 def update(self): if", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeft[self.walkCount // 3],", "self.image = pygame.Surface([24, 28]) self.image.fill((0,0,0)) self.rect = self.image.get_rect() self.rect.y =", "= 0 screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "self.expleft = False if i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright = False if i.rect.collidepoint(self.rect.x,self.upcheck):", "[pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def __init__(self, x, y,", "bomb(pygame.sprite.Sprite): def __init__(self, x, y, width, height, bomb_count, bomb_type): super().__init__()", "self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck", "self.change_y=0 self.superspeed=False self.speed=3 self.shield=False 
self.megabombs=False self.megabombcount=0 class Wall(pygame.sprite.Sprite): def __init__(self,", "// 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if self.change_y", "self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32)) if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "(self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16,", "= [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield =", "pygame.image.load('Images/p1back.png')] p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')]", "= True self.doubleexpleft = True self.expright = True self.doubleexpright =", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFrontshield[self.walkCount // 3],", "self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x,", "= None self.alive = True self.canmove = True self.deathCount =", "i in self.walls: if i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft = False if", "if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x,", "0 self.walkCount = 0 self.walls = None self.alive = 
True", "(self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck -", "False if i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright = False if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup =", "self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32))", "False if i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup = False if i.rect.collidepoint(self.rect.x, self.downcheck):", "= False if i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright = False if i.rect.collidepoint(self.rect.x,", "[pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'),", "self.walkCount += 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x,", "pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'),", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFront[self.walkCount", ">= 30: self.animationcount = 0 screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y))", "pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'), 
pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')] p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'),", "pygame.image.load('Images/p2frontshield.png')] p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')]", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y))", "y if self.superspeed and self.change_x==0 and self.change_y==0: self.speed=6 if self.superspeedcount>=150:", "1 if self.deathCount >= 200: self.rect.x = 1000 self.gotomenu=True def", "self.walkCount += 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x,", "self.back=False elif self.change_x >0: self.left=False self.right=True self.front=False self.back=False block_hit_list =", "32, 32)) if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32,", "- 32 self.rightcheck = self.rect.x + self.width self.upcheck = self.rect.y", "self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck", "screen): if self.number == 1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0)) if self.alive:", "21: self.walkCount = 0 screen.blit(self.p2walkBack[self.walkCount // 3], 
(self.rect.x, self.rect.y)) self.walkCount", "self.walls: if i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft = False if i.rect.collidepoint(self.leftcheck-32, self.rect.y):", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'),", "(self.rightcheck - 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32))", "21: self.walkCount = 0 screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "= number self.spawntimer=0 self.respawntimer=0 self.exists=True self.animationcount=0 def draw(self, screen): if", "self.rect.y, 32, 32)) if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y,", "if self.alive: if self.front: if self.shield: if self.change_y == 0:", "self.front: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y))", "self.walkCount += 1 elif self.back: if self.shield: if self.change_y ==", "self.deathCount += 1 if self.deathCount >= 200: self.rect.x = 1000", "self.left=False self.right=True self.front=False self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls, False) for", "0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "if self.front: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x,", "// 10], (self.rect.x, self.rect.y)) self.deathCount += 1 if self.deathCount >=", "// 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.right: if", "draw(self, screen): if self.number == 1: 
screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0)) if", "self.animationcount = 0 screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount +=", "= block.rect.bottom def draw(self, screen): if self.number == 1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'),", "+= 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y))", "if i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright = False if i.rect.collidepoint(self.rect.x, self.upcheck): self.expup", "self.change_x = 0 self.change_y = 0 self.walkCount = 0 self.walls", "self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16))", "self.image.get_rect() self.rect.y = y self.rect.x = x self.front = True", "self.spawntimer>50: if self.animationcount + 1 >= 30: self.animationcount = 0", "0 screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif", "+= 1 elif self.left: if self.shield: if self.change_x == 0:", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x,", "self.rect.x = x self.width = width self.height = height self.bomb_count", "pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'),", "self.respawntimer=0 self.exists=True class bomb(pygame.sprite.Sprite): def __init__(self, x, y, width, height,", "= [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), 
pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight =", "pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'),", "screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "self.change_y = 0 self.walkCount = 0 self.walls = None self.alive", "pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')]", "32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32))", "1000 self.gotomenu=True def reset(self,x,y): self.gotomenu = False self.alive = True", "if self.change_x > 0: self.rect.right = block.rect.left else: self.rect.left =", "screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "x, y, width, height): super().__init__() self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32)", "= 0 screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1", "else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y)) else: if", 
"screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 120: if self.bomb_type==0: for", "1 >= 30: self.animationcount = 0 screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x,", "pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'),", "self.rect = self.image.get_rect() self.rect.y = y self.rect.x = x class", "= True self.deathCount = 0 self.gotomenu=False self.speed=3 self.superspeed=False self.superspeedcount=0 self.shield=False", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBack[self.walkCount", "= True self.doubleexpright = True self.expup = True self.doubleexpup =", "if i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft = False if i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright", "self.shieldcount=0 self.megabombs=False self.megabombcount = 0 def changespeed(self, x, y): self.change_x", "+= x self.change_y += y if self.superspeed and self.change_x==0 and", "+ self.height self.expleft = True self.doubleexpleft = True self.expright =", "screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y)) elif self.bomb_count <", "elif self.right: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x,", "False if i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x,", "= 0 
screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "32)) if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32))", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32,", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x,", "pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'),", "self.walkCount = 0 screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32))", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32)) if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y))", "120: if self.bomb_type==0: for i in self.walls: if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft", "pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft 
= [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'),", "self.change_x==0 and self.change_y==0: self.speed=6 if self.superspeedcount>=150: self.superspeed = False self.speed=3", "bomb_count self.bomb_type = bomb_type self.walls = None self.leftcheck = self.rect.x", "False and self.deathCount < 200: screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y))", "= [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield =", "self.deathCount < 200: screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y)) self.deathCount +=", "0)) if self.alive: if self.front: if self.shield: if self.change_y ==", "pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'),", "(2, 0)) if self.alive: if self.front: if self.shield: if self.change_y", "pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'),", "self.downcheck): self.expdown = False if i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown = False", "self.bomb_count < 90: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, 
self.rect.y))", "if self.number==1: if self.exists and self.spawntimer>50: if self.animationcount + 1", "block_hit_list: if self.change_y > 0: self.rect.bottom = block.rect.top else: self.rect.top", "21: self.walkCount = 0 screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "self.rect.y + self.height self.expleft = True self.doubleexpleft = True self.expright", "200: self.rect.x = 1000 self.gotomenu=True def reset(self,x,y): self.gotomenu = False", "if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "if self.change_y > 0: self.rect.bottom = block.rect.top else: self.rect.top =", "Player(pygame.sprite.Sprite): death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'),", "0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "y self.rect.x = x self.front = True self.back = False", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeft[self.walkCount // 3],", "bomb_count, bomb_type): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32) image", "screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y))", "[pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), 
pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'),", "self.spawntimer=0 self.respawntimer=0 self.exists=True self.animationcount=0 def draw(self, screen): if self.number==1: if", "(self.leftcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck,", "self.rect = self.image.get_rect() self.rect.y = y self.rect.x = x self.width", "(self.rect.x, self.rect.y)) self.walkCount += 1 elif self.back: if self.shield: if", "self.right=False self.front=False self.back=False elif self.change_x >0: self.left=False self.right=True self.front=False self.back=False", "self.upcheck + 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32)) if", "pygame.image.load('Images/death20.png')] p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')]", "self.change_y==0: self.speed=6 if self.superspeedcount>=150: self.superspeed = False self.speed=3 self.superspeedcount=0 def", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBackshield[self.walkCount", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y))", "[pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight = 
[pygame.image.load('Images/p1right.png'),", "block in block_hit_list: if self.change_y > 0: self.rect.bottom = block.rect.top", "pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def __init__(self, x, y, number): super().__init__()", "3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.number == 2:", "screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y)) else: if", "self.rect.y)) elif self.bomb_count < 60: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb2.png'),", "self.number = number self.spawntimer=0 self.respawntimer=0 self.exists=True self.animationcount=0 def draw(self, screen):", "if i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y,", "False if i.rect.collidepoint(self.rect.x, self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x, self.upcheck-32):", "= 0 screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16,", "21: self.walkCount = 0 screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "(self.rect.x, self.upcheck + 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32))", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeft[self.walkCount // 3], 
(self.rect.x,", "self.rect.x + self.width self.upcheck = self.rect.y - 32 self.downcheck =", "pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'),", "== 0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] # Constructor function def __init__(self, x, y, number):", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkRight[self.walkCount", "if self.change_x == 0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "0 self.walls = None self.alive = True self.canmove = True", "self.expup = False if i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "True self.expright = True self.doubleexpright = True self.expup = True", "= self.rect.y + self.height self.expleft = True self.doubleexpleft = True", "self.walls, False) for block in block_hit_list: if self.change_x > 0:", "if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "self.doubleexpdown = True self.expboxlist = [] def draw(self, screen): if", "self.right=False self.front=False self.back=True elif self.change_y >0: self.left=False self.right=False self.front=True self.back=False", "def reset(self): self.spawntimer=0 self.respawntimer=0 self.exists=True class bomb(pygame.sprite.Sprite): def __init__(self, x,", "and 
self.spawntimer > 50: if self.animationcount + 1 >= 30:", "self.right: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y))", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x,", "self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck", "pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'),", "// 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.back: if", "self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32, 32)) if self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'),", "(self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x,", "= 0 screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "<0: 
self.left=False self.right=False self.front=False self.back=True elif self.change_y >0: self.left=False self.right=False", "self.change_x if self.change_x <0: self.left=True self.right=False self.front=False self.back=False elif self.change_x", "self.walkCount = 0 self.walls = None self.alive = True self.canmove", "= pygame.Surface([22, 28], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect =", "def draw(self, screen): if self.number==1: if self.exists and self.spawntimer>50: if", "p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield", "height): super().__init__() self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32) image = self.image.convert_alpha()", "= pygame.sprite.spritecollide(self, self.walls, False) for block in block_hit_list: if self.change_x", "else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y)) else: if", "if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y)) else:", "self.rect.y - 32 self.downcheck = self.rect.y + self.height self.expleft =", "False) for block in block_hit_list: if self.change_x > 0: self.rect.right", "self.change_x == 0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "self.shield=False self.shieldcount=0 self.megabombs=False self.megabombcount = 0 def changespeed(self, x, y):", "pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] # Constructor function def 
__init__(self,", "self.deathCount = 0 self.rect.x = x self.rect.y = y self.canmove", "self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck,", "False self.alive = True self.deathCount = 0 self.rect.x = x", "30: self.animationcount = 0 screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount", "[pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield = [pygame.image.load('Images/p1backshield.png'),", "28], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect = self.image.get_rect() self.rect.y", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck +", "(self.rect.x, self.rect.y)) self.walkCount += 1 else: if self.change_x == 0:", "self.rect.y)) self.walkCount += 1 elif self.number == 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415,", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32)) if self.expup:", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck", "self.canmove = True self.deathCount = 0 self.gotomenu=False self.speed=3 self.superspeed=False self.superspeedcount=0", "self.megabombcount = 0 def changespeed(self, x, y): self.change_x += x", "self.rect.y)) else: 
screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 90: if", "update(self): if self.canmove: self.rect.x += self.change_x if self.change_x <0: self.left=True", "self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 60: if", "0 screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else:", "p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft", "self.rect.bottom = block.rect.top else: self.rect.top = block.rect.bottom def draw(self, screen):", "= 0 screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "screen): if self.bomb_count < 30: if self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y))", "self.rect.y): self.expleft = False if i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft = False", "32, 32)) if self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32,", "self.rect.y)) self.walkCount += 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2left.png'),", "self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck,", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFront[self.walkCount //", "- 16)) self.expboxlist.append(pygame.Rect(self.rect.x, 
self.downcheck - 16, 32, 32)) if self.doubleexpdown:", "> 50: if self.animationcount + 1 >= 30: self.animationcount =", "screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck +", "pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'),", "if self.change_x == 0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32))", "== 0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "0: self.rect.bottom = block.rect.top else: self.rect.top = block.rect.bottom def draw(self,", "self.rect.y)) self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1back.png'),", "if self.change_x == 0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "self.change_y > 0: self.rect.bottom = block.rect.top else: self.rect.top = block.rect.bottom", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32)) if self.expdown:", "== 0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "< 120: if self.bomb_type==0: for i in self.walls: if 
i.rect.collidepoint(self.leftcheck,self.rect.y):", "self.rect.top = block.rect.bottom def draw(self, screen): if self.number == 1:", "+ 1 >= 30: self.animationcount = 0 screen.blit(self.megabombanimation[self.animationcount // 5],", "= False if i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup = False if i.rect.collidepoint(self.rect.x,", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFrontshield[self.walkCount // 3],", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x,", "y, number): super().__init__() self.image = pygame.Surface([24, 28]) self.image.fill((0,0,0)) self.rect =", "pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')] shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'),", "pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'),", "(self.rect.x, self.rect.y)) self.animationcount += 1 def reset(self): self.spawntimer=0 self.respawntimer=0 self.exists=True", "pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'),", "= height self.bomb_count = bomb_count self.bomb_type = bomb_type self.walls =", "3], 
(self.rect.x, self.rect.y)) self.walkCount += 1 elif self.back: if self.shield:", "= 0 screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'),", "screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32,", "1 else: if self.exists and self.spawntimer > 50: if self.animationcount", "= False if i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "self.rect = self.image.get_rect() self.rect.y = y self.rect.x = x self.number", "self.rect.x += self.change_x if self.change_x <0: self.left=True self.right=False self.front=False self.back=False", "if i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup = False if i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown", "self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck", "self.image.get_rect() self.rect.y = y self.rect.x = x class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'),", "self.back = False self.left = False self.right = False self.number", "if 
self.change_y == 0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "reset(self,x,y): self.gotomenu = False self.alive = True self.deathCount = 0", "self.animationcount + 1 >= 30: self.animationcount = 0 screen.blit(self.megabombanimation[self.animationcount //", "self.width = width self.height = height self.bomb_count = bomb_count self.bomb_type", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck + 16,", "+= 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y))", "if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y)) else:", "self.rect.y)) self.walkCount += 1 if self.alive == False and self.deathCount", "self.alive == False and self.deathCount < 200: screen.blit(self.death[self.deathCount // 10],", "self.expleft = False if i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft = False if", "self.shield=False self.megabombs=False self.megabombcount=0 class Wall(pygame.sprite.Sprite): def __init__(self, x, y, width,", "if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y)) else:", "elif self.bomb_count < 120: if self.bomb_type==0: for i in self.walls:", "self.expdown = False if i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown = False screen.blit(pygame.image.load('Images/explosion.png'),", "if self.change_x == 0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "+= 1 elif self.right: if self.shield: if self.change_x == 0:", "self.animationcount + 1 >= 30: self.animationcount = 0 screen.blit(self.superspeedanimation[self.animationcount //", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, 
self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))", "True self.expboxlist = [] def draw(self, screen): if self.bomb_count <", "self.walkCount = 0 screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "if i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown = False if i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown", "self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y)) else: if", "super().__init__() self.image = pygame.Surface([24, 28]) self.image.fill((0,0,0)) self.rect = self.image.get_rect() self.rect.y", "self.walkCount = 0 screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFront[self.walkCount // 3],", "p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront", "self.change_y == 0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "elif self.change_y >0: self.left=False self.right=False self.front=True self.back=False block_hit_list = pygame.sprite.spritecollide(self,", "self.expup = False if i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup = False if", "= block.rect.left else: self.rect.left = block.rect.right self.rect.y += self.change_y if", "pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), 
pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'),", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFrontshield[self.walkCount //", "self.megabombs=False self.megabombcount=0 class Wall(pygame.sprite.Sprite): def __init__(self, x, y, width, height):", "32, 32)) if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32,", "self.animationcount + 1 >= 30: self.animationcount = 0 screen.blit(self.shieldanimation[self.animationcount //", "block.rect.left else: self.rect.left = block.rect.right self.rect.y += self.change_y if self.change_y", "0 screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else:", "self.expright = False if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x,self.downcheck):", "5], (self.rect.x, self.rect.y)) self.animationcount += 1 else: if self.exists and", "self.animationcount += 1 elif self.number==2: if self.exists and self.spawntimer >", "self.superspeedcount=0 def update(self): if self.canmove: self.rect.x += self.change_x if self.change_x", "= self.image.get_rect() self.rect.y = y self.rect.x = x class powerup(pygame.sprite.Sprite):", "= True self.expdown = True self.doubleexpdown = True self.expboxlist =", "= False if i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown = False if i.rect.collidepoint(self.rect.x,", "screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "+ 1 >= 30: self.animationcount = 0 screen.blit(self.superspeedanimation[self.animationcount // 5],", "pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), 
pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'),", "// 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.number ==", "+= 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y))", "screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.back:", "0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "= y self.rect.x = x self.front = True self.back =", "pygame.image.load('Images/p2left.png')] p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')]", "== 1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0)) if self.alive: if self.front: if", "self.rect.y += self.change_y if self.change_y <0: self.left=False self.right=False self.front=False self.back=True", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBack[self.walkCount", "False if i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright = False if i.rect.collidepoint(self.rect.x, self.upcheck):", "self.change_x == 0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "(self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, 
self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16,", "p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield", "bomb_type self.walls = None self.leftcheck = self.rect.x - 32 self.rightcheck", "self.superspeedcount>=150: self.superspeed = False self.speed=3 self.superspeedcount=0 def update(self): if self.canmove:", "False if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown =", "(self.rect.x, self.rect.y)) elif self.bomb_count < 120: if self.bomb_type==0: for i", "< 60: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y)) else:", "pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def __init__(self, x, y, number): super().__init__() self.image =", "block.rect.top else: self.rect.top = block.rect.bottom def draw(self, screen): if self.number", "screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x,", "y, number): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32) image", "if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x,", "= [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), 
pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight =", "+= y if self.superspeed and self.change_x==0 and self.change_y==0: self.speed=6 if", "screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.left:", "0 self.change_y = 0 self.walkCount = 0 self.walls = None", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32)) if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'),", "screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "block in block_hit_list: if self.change_x > 0: self.rect.right = block.rect.left", ">= 21: self.walkCount = 0 screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))", "screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "pygame.image.load('Images/p2back.png')] p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')]", "self.rect.x = 1000 self.gotomenu=True def reset(self,x,y): self.gotomenu = False self.alive", "pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] # Constructor function def __init__(self, x, y,", "32 self.downcheck = self.rect.y + self.height self.expleft = True self.doubleexpleft", "self.rect.y)) self.walkCount += 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1right.png'),", "pygame.image.load('Images/superspeed3.png'), 
pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')] shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'),", "self.right=True self.front=False self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls, False) for block", "= 1000 self.gotomenu=True def reset(self,x,y): self.gotomenu = False self.alive =", "x self.rect.y = y self.canmove = True self.front = True", "self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16,", "[pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight = [pygame.image.load('Images/p2right.png'),", "32)) if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32))", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x,", ">= 21: self.walkCount = 0 screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "- 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32)) if", "elif self.number==2: if self.exists and self.spawntimer > 50: if self.animationcount", "x class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), 
pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')] shieldanimation", "= True self.canmove = True self.deathCount = 0 self.gotomenu=False self.speed=3", "21: self.walkCount = 0 screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "10], (self.rect.x, self.rect.y)) self.deathCount += 1 if self.deathCount >= 200:", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFrontshield[self.walkCount", "def __init__(self, x, y, number): super().__init__() self.image = pygame.Surface([22, 28],", "self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x,", "5], (self.rect.x, self.rect.y)) self.animationcount += 1 elif self.number==2: if self.exists", "(self.rect.x, self.rect.y)) self.animationcount += 1 elif self.number==2: if self.exists and", "self.number==2: if self.exists and self.spawntimer > 50: if self.animationcount +", "if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y)) else:", "if self.exists and self.spawntimer>50: if self.animationcount + 1 >= 30:", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y))", "self.back=True elif self.change_y >0: self.left=False self.right=False self.front=True self.back=False block_hit_list =", "pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront = 
[pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'),", "pygame class Player(pygame.sprite.Sprite): death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'),", "self.megabombs=False self.megabombcount = 0 def changespeed(self, x, y): self.change_x +=", "if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "if i.rect.collidepoint(self.rect.x, self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup", "[pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'),", "+= 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y))", "(self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32)) if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck,", "pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'),", "= x self.width = width self.height = height self.bomb_count =", "- 32 self.downcheck = 
self.rect.y + self.height self.expleft = True", "32, 32)) if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32,", "self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x,", "0 screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1 else:", "+ self.width self.upcheck = self.rect.y - 32 self.downcheck = self.rect.y", "0: screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32, 32)) if self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'),", "pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def __init__(self, x, y, number): super().__init__() self.image", "1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y)) else:", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck -", "2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0)) if self.alive: if self.front: if self.shield:", "+= 1 def reset(self): self.spawntimer=0 self.respawntimer=0 self.exists=True class bomb(pygame.sprite.Sprite): def", "pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), 
pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'),", "self.upcheck-32): self.doubleexpup = False if i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown = False", "if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y)) else:", "= x self.rect.y = y self.canmove = True self.front =", "= True self.doubleexpdown = True self.expboxlist = [] def draw(self,", "pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'),", "screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'),", "5], (self.rect.x, self.rect.y)) self.animationcount += 1 def reset(self): self.spawntimer=0 self.respawntimer=0", "pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'),", "pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight = [pygame.image.load('Images/p1right.png'), 
pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'),", "self.back: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y))", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x,", "if i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright = False if i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright", "self.doubleexpright = False if i.rect.collidepoint(self.rect.x, self.upcheck): self.expup = False if", "screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y)) elif self.bomb_count <", "pygame.image.load('Images/p2backshield.png')] # Constructor function def __init__(self, x, y, number): super().__init__()", "0 screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif", "0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "= 0 screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "self.rect.right = block.rect.left else: self.rect.left = block.rect.right self.rect.y += self.change_y", "pygame.image.load('Images/p1frontshield.png')] p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')]", "0 self.gotomenu=False self.speed=3 
self.superspeed=False self.superspeedcount=0 self.shield=False self.shieldcount=0 self.megabombs=False self.megabombcount =", "megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def __init__(self,", "self.change_x=0 self.change_y=0 self.superspeed=False self.speed=3 self.shield=False self.megabombs=False self.megabombcount=0 class Wall(pygame.sprite.Sprite): def", "False if i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft = False if i.rect.collidepoint(self.rightcheck, self.rect.y):", "32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32))", "= [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield =", "self.walkCount = 0 screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "21: self.walkCount = 0 screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "self.downcheck - 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32, 32)) if", "pygame.image.load('Images/p1rightshield.png')] p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')]", 
"pygame.image.load('Images/p1right.png')] p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')]", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y,", "28]) self.image.fill((0,0,0)) self.rect = self.image.get_rect() self.rect.y = y self.rect.x =", "if self.superspeed and self.change_x==0 and self.change_y==0: self.speed=6 if self.superspeedcount>=150: self.superspeed", "for i in self.walls: if i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft = False", ">= 200: self.rect.x = 1000 self.gotomenu=True def reset(self,x,y): self.gotomenu =", "pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'),", "self.bomb_count < 30: if self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'),", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16))", "self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y)) else: if", "self.downcheck - 16, 32, 32)) if self.doubleexpdown: 
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32))", "32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32,", "__init__(self, x, y, number): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA,", "self.change_y == 0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "self.upcheck+16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck,", "True self.expdown = True self.doubleexpdown = True self.expboxlist = []", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkRightshield[self.walkCount", "pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft =", "if self.change_x == 0: screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32)) elif self.bomb_type==1: for", "p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight", "0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y)) else: 
if self.walkCount + 1 >=", "self.change_x == 0: screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "if i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x,", "self.superspeed=False self.superspeedcount=0 self.shield=False self.shieldcount=0 self.megabombs=False self.megabombcount = 0 def changespeed(self,", "i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft = False if i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright =", "= y self.canmove = True self.front = True self.change_x=0 self.change_y=0", "screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32)) if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'),", "self.rect.y, 32, 32)) if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y,", "0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32,", "i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft = False if i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft =", "if i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft = False if i.rect.collidepoint(self.leftcheck-32, self.rect.y): self.doubleexpleft", "self.upcheck-32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, 
self.upcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32))", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x,", "elif self.bomb_type==1: for i in self.walls: if i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft", "self.canmove = True self.front = True self.change_x=0 self.change_y=0 self.superspeed=False self.speed=3", "self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 120: if", "[pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')] p2walkBackshield = [pygame.image.load('Images/p2backshield.png'),", "self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x,", "import pygame class Player(pygame.sprite.Sprite): death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'),", "True self.deathCount = 0 self.gotomenu=False self.speed=3 self.superspeed=False self.superspeedcount=0 self.shield=False self.shieldcount=0", "self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16,", "super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32) image = self.image.convert_alpha()", "32 self.rightcheck = self.rect.x + self.width self.upcheck = self.rect.y -", "= pygame.sprite.spritecollide(self, self.walls, False) for block in block_hit_list: if self.change_y", "self.rect.y)) 
self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2back.png'),", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x,", "def __init__(self, x, y, number): super().__init__() self.image = pygame.Surface([24, 28])", "16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32)) if self.doubleexpleft:", "self.upcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))", "== 0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y)) elif", "def update(self): if self.canmove: self.rect.x += self.change_x if self.change_x <0:", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32)) if", "screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.right:", "(self.rect.x, self.rect.y)) self.walkCount += 1 elif self.right: if self.shield: if", "< 90: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y)) else:", "if self.change_y <0: self.left=False self.right=False self.front=False self.back=True elif self.change_y >0:", "True self.change_x=0 self.change_y=0 self.superspeed=False self.speed=3 self.shield=False self.megabombs=False self.megabombcount=0 class Wall(pygame.sprite.Sprite):", ">= 21: self.walkCount = 0 screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "[pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), 
pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft = [pygame.image.load('Images/p2left.png'),", "i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup = False if i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown =", "pygame.image.load('Images/p1leftshield.png')] p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')]", "self.number == 1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0)) if self.alive: if self.front:", "200: screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y)) self.deathCount += 1 if", "= y self.rect.x = x class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'),", "in self.walls: if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft = False if i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright", "if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y)) else:", "__init__(self, x, y, width, height, bomb_count, bomb_type): super().__init__() self.image =", "if self.change_y == 0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "self.image.fill((0,0,0)) self.rect = self.image.get_rect() self.rect.y = y self.rect.x = x", "self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) 
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32)) if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32,", "self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32,", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x,", "y, width, height): super().__init__() self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32) image", "screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y))", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32)) if self.expup:", "if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'),", "False self.right = False self.number = number self.change_x = 0", "1 elif self.number==2: if self.exists and self.spawntimer > 50: if", "if self.change_x == 0: 
screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'),", "(self.rect.x, self.rect.y)) self.walkCount += 1 elif self.number == 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'),", "p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBack[self.walkCount //", ">= 21: self.walkCount = 0 screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16))", "self.deathCount = 0 self.gotomenu=False self.speed=3 self.superspeed=False self.superspeedcount=0 self.shield=False self.shieldcount=0 self.megabombs=False", "0: self.rect.right = block.rect.left else: self.rect.left = block.rect.right self.rect.y +=", "+= 1 else: if self.exists and self.spawntimer > 50: if", "self.rect.left = block.rect.right self.rect.y += self.change_y if self.change_y <0: self.left=False", "0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "= True self.expboxlist = [] def draw(self, screen): if self.bomb_count", 
"(self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x,", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32)) if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'),", "screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1right2shield.png'),", "21: self.walkCount = 0 screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 if self.alive", "screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32,", "pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'),", "i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright = False if i.rect.collidepoint(self.rect.x, self.upcheck): self.expup =", 
"pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'),", "self.superspeed = False self.speed=3 self.superspeedcount=0 def update(self): if self.canmove: self.rect.x", "0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "if self.change_y == 0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "self.rect.x = x self.front = True self.back = False self.left", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRight[self.walkCount", "self.deathCount >= 200: self.rect.x = 1000 self.gotomenu=True def reset(self,x,y): self.gotomenu", "self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32,", "pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'),", "self.number==1: if self.exists and self.spawntimer>50: if self.animationcount + 1 >=", "self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+16, 32,", ">0: self.left=False self.right=False self.front=True self.back=False 
block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)", "32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32))", "= [] def draw(self, screen): if self.bomb_count < 30: if", "False self.left = False self.right = False self.number = number", "self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls, False) for block in block_hit_list:", ">= 21: self.walkCount = 0 screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'),", "if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "self.height self.expleft = True self.doubleexpleft = True self.expright = True", "self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y)) else: if", "pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')] p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'),", "pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), 
pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'),", "if self.change_x == 0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "[pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'),", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32)) if self.expright:", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFront[self.walkCount", "0 screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else:", "pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'),", "self.rect.y)) elif self.bomb_count < 90: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb1.png'),", "// 5], (self.rect.x, self.rect.y)) self.animationcount += 1 elif self.number==2: if", "Wall(pygame.sprite.Sprite): def __init__(self, x, y, width, height): super().__init__() self.image =", "pygame.image.load('Images/death14.png'), 
pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'),", "= y self.rect.x = x self.width = width self.height =", "= 0 self.gotomenu=False self.speed=3 self.superspeed=False self.superspeedcount=0 self.shield=False self.shieldcount=0 self.megabombs=False self.megabombcount", "if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "(self.rect.x, self.rect.y)) self.animationcount += 1 else: if self.exists and self.spawntimer", "self.downcheck = self.rect.y + self.height self.expleft = True self.doubleexpleft =", "pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'),", "pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'),", "self.rect.y)) self.walkCount += 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1left.png'),", "False) for block in block_hit_list: if self.change_y > 0: self.rect.bottom", "self.change_x == 0: 
screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')]", "= bomb_type self.walls = None self.leftcheck = self.rect.x - 32", "pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'),", ">= 21: self.walkCount = 0 screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "[pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'),", "pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'),", "90: if self.bomb_type == 0: 
screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'),", "32)) if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32))", "pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'),", "def __init__(self, x, y, width, height): super().__init__() self.image = pygame.Surface([width,height],", "= False self.number = number self.change_x = 0 self.change_y =", "3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.right: if self.shield:", "self.change_x == 0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y)) else: if", "= False if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown", "if self.change_x == 0: screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "32)) if self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32))", "self.rect.x = x self.rect.y = y self.canmove = True self.front", "width, height, bomb_count, bomb_type): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA,", "= bomb_count self.bomb_type = bomb_type self.walls = None self.leftcheck =", 
"[pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')] p1walkBack = [pygame.image.load('Images/p1back.png'),", "else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y)) else: if", "= 0 screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "True self.expup = True self.doubleexpup = True self.expdown = True", "pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')] shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'),", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32)) if", "self.expup = True self.doubleexpup = True self.expdown = True self.doubleexpdown", "__init__(self, x, y, number): super().__init__() self.image = pygame.Surface([24, 28]) self.image.fill((0,0,0))", "self.rect.y)) self.deathCount += 1 if self.deathCount >= 200: self.rect.x =", "= [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront =", "self.doubleexpdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 
32, 32))", "pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'),", "0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "= 0 screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "= [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield =", "> 0: self.rect.right = block.rect.left else: self.rect.left = block.rect.right self.rect.y", "1 def reset(self): self.spawntimer=0 self.respawntimer=0 self.exists=True class bomb(pygame.sprite.Sprite): def __init__(self,", "True self.back = False self.left = False self.right = False", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16,", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeftshield[self.walkCount", "pygame.image.load('Images/p2front.png')] p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')]", "self.change_y if self.change_y <0: self.left=False self.right=False self.front=False self.back=True elif self.change_y", 
"screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.left:", "screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0)) if self.alive: if self.front: if self.shield: if", "== 0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "= True self.doubleexpup = True self.expdown = True self.doubleexpdown =", "pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect = self.image.get_rect() self.rect.y =", "if self.animationcount + 1 >= 30: self.animationcount = 0 screen.blit(self.shieldanimation[self.animationcount", "elif self.left: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x,", "self.bomb_type==0: for i in self.walls: if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft = False", "i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y,", "= 0 self.change_y = 0 self.walkCount = 0 self.walls =", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32)) elif", "+= 1 if self.alive == False and self.deathCount < 200:", ">0: self.left=False self.right=True self.front=False self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)", "x self.change_y += y if self.superspeed and self.change_x==0 and self.change_y==0:", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBack[self.walkCount // 3],", "self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup = False", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16)) 
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32)) if", "self.superspeedcount=0 self.shield=False self.shieldcount=0 self.megabombs=False self.megabombcount = 0 def changespeed(self, x,", "1 if self.alive == False and self.deathCount < 200: screen.blit(self.death[self.deathCount", "self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", ">= 21: self.walkCount = 0 screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))", "block.rect.bottom def draw(self, screen): if self.number == 1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2,", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16))", "self.change_y >0: self.left=False self.right=False self.front=True self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls,", "p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')] p2walkFront", "else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y)) else: if", "= pygame.Surface([width,height], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect = self.image.get_rect()", "1 elif self.left: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1leftshield.png'),", "number self.spawntimer=0 self.respawntimer=0 self.exists=True 
self.animationcount=0 def draw(self, screen): if self.number==1:", "self.walls: if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft = False if i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright =", "= 0 screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1", "self.expdown = True self.doubleexpdown = True self.expboxlist = [] def", "if self.change_y == 0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "self.animationcount = 0 screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount +=", "in block_hit_list: if self.change_x > 0: self.rect.right = block.rect.left else:", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y,", "self.walkCount = 0 screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "Constructor function def __init__(self, x, y, number): super().__init__() self.image =", "pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'),", "self.walkCount = 0 screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "def draw(self, screen): if self.bomb_count < 30: if self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'),", "// 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if self.change_x", "if self.bomb_count < 30: if self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y)) else:", "for 
block in block_hit_list: if self.change_y > 0: self.rect.bottom =", "= [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] # Constructor", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkRight[self.walkCount //", "screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeft[self.walkCount", "0 screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else:", "= 0 screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1", "for i in self.walls: if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft = False if", "= block.rect.right self.rect.y += self.change_y if self.change_y <0: self.left=False self.right=False", "pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')] p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'),", "pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'), pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft", "y, 
width, height, bomb_count, bomb_type): super().__init__() self.image = pygame.Surface([22, 28],", "self.bomb_count < 60: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y))", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32)) if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'),", "self.bomb_type = bomb_type self.walls = None self.leftcheck = self.rect.x -", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBackshield[self.walkCount //", "self.number == 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0)) if self.alive: if self.front:", "self.left = False self.right = False self.number = number self.change_x", "(self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 90:", "= [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront =", "pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'),", "if self.change_y == 0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "number): super().__init__() self.image = pygame.Surface([24, 28]) self.image.fill((0,0,0)) self.rect = self.image.get_rect()", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBackshield[self.walkCount // 3],", 
"else: if self.exists and self.spawntimer > 50: if self.animationcount +", "True self.doubleexpdown = True self.expboxlist = [] def draw(self, screen):", "pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'),", "self.walkCount += 1 elif self.number == 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0))", "3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if self.change_x ==", "= False if i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright = False if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup", ">= 21: self.walkCount = 0 screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))", "1 elif self.right: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2rightshield.png'),", "== 0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "self.front=False self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls, False) for block in", "0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "= [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def __init__(self, x,", "pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), 
pygame.image.load('Images/p1frontshield.png')] p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'),", "self.right = False self.number = number self.change_x = 0 self.change_y", "i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright = False if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup = False if", "screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.back:", "0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "x, y, number): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x,", "y self.canmove = True self.front = True self.change_x=0 self.change_y=0 self.superspeed=False", "= [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')] p1walkBack =", "= True self.change_x=0 self.change_y=0 self.superspeed=False self.speed=3 self.shield=False self.megabombs=False self.megabombcount=0 class", "= True self.back = False self.left = False self.right =", "self.alive = True self.deathCount = 0 self.rect.x = x self.rect.y", "self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32,", "3], (self.rect.x, self.rect.y)) 
self.walkCount += 1 elif self.left: if self.shield:", "self.rect.y)) self.walkCount += 1 elif self.back: if self.shield: if self.change_y", "y self.rect.x = x self.width = width self.height = height", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y))", "= self.image.get_rect() self.rect.y = y self.rect.x = x self.front =", "if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y)) else:", ">= 21: self.walkCount = 0 screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))", "self.walkCount = 0 screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "== False and self.deathCount < 200: screen.blit(self.death[self.deathCount // 10], (self.rect.x,", "bomb_type): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32) image =", "elif self.left: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x,", "self.front = True self.back = False self.left = False self.right", "+= 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y))", ">= 21: self.walkCount = 0 screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y)) else: if", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'),", "== 0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "if self.change_x <0: self.left=True self.right=False self.front=False 
self.back=False elif self.change_x >0:", "else: screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 90: if self.bomb_type", "if self.deathCount >= 200: self.rect.x = 1000 self.gotomenu=True def reset(self,x,y):", "and self.change_x==0 and self.change_y==0: self.speed=6 if self.superspeedcount>=150: self.superspeed = False", "self.change_y <0: self.left=False self.right=False self.front=False self.back=True elif self.change_y >0: self.left=False", "= [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'),", "= x self.number = number self.spawntimer=0 self.respawntimer=0 self.exists=True self.animationcount=0 def", "[pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack = [pygame.image.load('Images/p2back.png'),", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'),", "self.image.convert_alpha() self.rect = self.image.get_rect() self.rect.y = y self.rect.x = x", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRightshield[self.walkCount // 3],", "self.animationcount = 0 screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount +=", "self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) 
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "1 >= 30: self.animationcount = 0 screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x,", "(415, 0)) if self.alive: if self.front: if self.shield: if self.change_y", "self.walkCount += 1 if self.alive == False and self.deathCount <", "pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'),", "21: self.walkCount = 0 screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "self.exists=True self.animationcount=0 def draw(self, screen): if self.number==1: if self.exists and", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBackshield[self.walkCount", "self.change_y == 0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'),", "height self.bomb_count = bomb_count self.bomb_type = bomb_type self.walls = None", "1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y)) else:", "= False if i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright = False if i.rect.collidepoint(self.rightcheck+32,", "pygame.image.load('Images/p1right1shield.png'), 
pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'),", "class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')] shieldanimation =", "super().__init__() self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect", "image = self.image.convert_alpha() self.rect = self.image.get_rect() self.rect.y = y self.rect.x", "width self.height = height self.bomb_count = bomb_count self.bomb_type = bomb_type", "= False self.alive = True self.deathCount = 0 self.rect.x =", "reset(self): self.spawntimer=0 self.respawntimer=0 self.exists=True class bomb(pygame.sprite.Sprite): def __init__(self, x, y,", "= None self.leftcheck = self.rect.x - 32 self.rightcheck = self.rect.x", "= pygame.Surface([24, 28]) self.image.fill((0,0,0)) self.rect = self.image.get_rect() self.rect.y = y", "self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x,", "self.walls, False) for block in block_hit_list: if self.change_y > 0:", "self.walkCount = 0 screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "self.walkCount = 0 screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "pygame.image.load('Images/p1right2shield.png'), 
pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'),", "32, 32)) elif self.bomb_type==1: for i in self.walls: if i.rect.collidepoint(self.leftcheck,", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck - 16,", "pygame.sprite.spritecollide(self, self.walls, False) for block in block_hit_list: if self.change_x >", "self.rect.y)) self.walkCount += 1 elif self.right: if self.shield: if self.change_x", "self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16)) self.expboxlist.append(pygame.Rect(self.rect.x,", "self.rect.y)) self.animationcount += 1 def reset(self): self.spawntimer=0 self.respawntimer=0 self.exists=True class", "= False self.left = False self.right = False self.number =", "in block_hit_list: if self.change_y > 0: self.rect.bottom = block.rect.top else:", "pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'),", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBackshield[self.walkCount // 3],", "powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'), 
pygame.image.load('Images/superspeed1.png')] shieldanimation = [pygame.image.load('Images/shield1.png'),", "True self.deathCount = 0 self.rect.x = x self.rect.y = y", "pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'),", "x, y): self.change_x += x self.change_y += y if self.superspeed", "self.change_y == 0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "// 3], (self.rect.x, self.rect.y)) self.walkCount += 1 if self.alive ==", "self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x,", "def __init__(self, x, y, width, height, bomb_count, bomb_type): super().__init__() self.image", "screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y)) self.deathCount += 1 if self.deathCount", "self.exists and self.spawntimer > 50: if self.animationcount + 1 >=", "# Constructor function def __init__(self, x, y, number): super().__init__() self.image", "else: if self.walkCount + 1 >= 21: self.walkCount = 0", "block_hit_list: if self.change_x > 0: self.rect.right = block.rect.left else: self.rect.left", "self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y))", "block.rect.right self.rect.y += self.change_y if self.change_y <0: self.left=False self.right=False self.front=False", "// 5], (self.rect.x, self.rect.y)) self.animationcount += 1 else: if self.exists", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFront[self.walkCount //", "x self.number = 
number self.spawntimer=0 self.respawntimer=0 self.exists=True self.animationcount=0 def draw(self,", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeftshield[self.walkCount", "elif self.bomb_count < 90: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x,", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32)) elif self.bomb_type==1:", "x self.front = True self.back = False self.left = False", "if self.number == 1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0)) if self.alive: if", "self.right=False self.front=True self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls, False) for block", "= 0 screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')] p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'),", "== 0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "self.walkCount += 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x,", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeft[self.walkCount", "pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'), 
pygame.image.load('Images2/megabombicon3.png'),", "self.gotomenu=True def reset(self,x,y): self.gotomenu = False self.alive = True self.deathCount", "self.speed=3 self.shield=False self.megabombs=False self.megabombcount=0 class Wall(pygame.sprite.Sprite): def __init__(self, x, y,", "self.rect.y)) else: if self.walkCount + 1 >= 21: self.walkCount =", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRightshield[self.walkCount", "self.walkCount = 0 screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "if self.exists and self.spawntimer > 50: if self.animationcount + 1", "self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck -", "False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32)) if self.expleft:", "False if i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown = False if i.rect.collidepoint(self.rect.x, self.downcheck+32):", "x self.width = width self.height = height self.bomb_count = bomb_count", "self.walkCount = 0 screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright = False if i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright =", "pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] # Constructor function def", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeftshield[self.walkCount // 3],", "0 screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 
else:", "self.animationcount += 1 else: if self.exists and self.spawntimer > 50:", ">= 21: self.walkCount = 0 screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))", "self.expdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32))", "self.image.get_rect() self.rect.y = y self.rect.x = x self.number = number", "21: self.walkCount = 0 screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "(self.rect.x, self.downcheck - 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32, 32))", "pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'),", "self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32))", "self.speed=3 self.superspeed=False self.superspeedcount=0 self.shield=False self.shieldcount=0 self.megabombs=False self.megabombcount = 0 def", "self.spawntimer=0 self.respawntimer=0 self.exists=True class bomb(pygame.sprite.Sprite): def __init__(self, x, y, width,", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))", "self.downcheck+32): self.doubleexpdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32,", "else: if self.change_x == 0: 
screen.blit(pygame.image.load('Images/p1left.png'), (self.rect.x, self.rect.y)) else: if", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeftshield[self.walkCount // 3],", "self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y,", "self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32))", "0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "def draw(self, screen): if self.number == 1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0))", "self.right: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y))", "= x class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')]", "number self.change_x = 0 self.change_y = 0 self.walkCount = 0", "self.doubleexpright = True self.expup = True self.doubleexpup = True self.expdown", "self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32,", "self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect", "= 0 screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup = False if 
i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown = False", "pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')] p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'),", "21: self.walkCount = 0 screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck +", "if self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'),", "self.rect.x = x class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'),", "pygame.Surface([width,height], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect = self.image.get_rect() self.rect.y", "(self.rect.x, self.upcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect =", "self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32)) elif self.bomb_type==1: for i in self.walls:", "21: self.walkCount = 0 screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "if self.shield: if 
self.change_x == 0: screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y)) else:", "self.change_x <0: self.left=True self.right=False self.front=False self.back=False elif self.change_x >0: self.left=False", "+= 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y))", "= [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack =", "self.front: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y))", "pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')] def __init__(self, x, y, number):", "pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'),", "None self.alive = True self.canmove = True self.deathCount = 0", "i in self.walls: if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft = False if i.rect.collidepoint(self.rightcheck,self.rect.y):", "class Player(pygame.sprite.Sprite): death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'), pygame.image.load('Images/death3.png'), pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'), pygame.image.load('Images/death6.png'),", 
"superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')] shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'),", "self.walkCount += 1 elif self.left: if self.shield: if self.change_x ==", ">= 30: self.animationcount = 0 screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y))", "self.animationcount += 1 def reset(self): self.spawntimer=0 self.respawntimer=0 self.exists=True class bomb(pygame.sprite.Sprite):", "changespeed(self, x, y): self.change_x += x self.change_y += y if", ">= 21: self.walkCount = 0 screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'),", "+= 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y))", "self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield", 
"= 0 screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "self.downcheck+32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+16, 32, 32))", "self.change_y += y if self.superspeed and self.change_x==0 and self.change_y==0: self.speed=6", "3], (self.rect.x, self.rect.y)) self.walkCount += 1 if self.alive == False", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBack[self.walkCount // 3],", "class Wall(pygame.sprite.Sprite): def __init__(self, x, y, width, height): super().__init__() self.image", "self.upcheck-16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck,", "60: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb2.png'),", "21: self.walkCount = 0 screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeftshield[self.walkCount //", "self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x,", "pygame.sprite.spritecollide(self, self.walls, False) for block in block_hit_list: if self.change_y >", "self.left=True self.right=False self.front=False self.back=False elif self.change_x >0: self.left=False self.right=True self.front=False", "self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y)) else: if", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16)) 
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'),", "= number self.change_x = 0 self.change_y = 0 self.walkCount =", "+ 16, 32, 32)) if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x,", "(self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 60:", "screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck", "(self.rect.x, self.downcheck+32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x,", "pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')] p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'),", "1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y)) else:", "if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft = False if i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright = False", "self.superspeed and self.change_x==0 and self.change_y==0: self.speed=6 if self.superspeedcount>=150: self.superspeed =", "16, self.rect.y, 32, 32)) if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y)) 
self.expboxlist.append(pygame.Rect(self.rightcheck+32,", "i.rect.collidepoint(self.rect.x, self.upcheck): self.expup = False if i.rect.collidepoint(self.rect.x, self.upcheck-32): self.doubleexpup =", "[pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'),", "== 0: screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "[pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront = [pygame.image.load('Images/p1front.png'),", "== 0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "__init__(self, x, y, width, height): super().__init__() self.image = pygame.Surface([width,height], pygame.SRCALPHA,", "and self.change_y==0: self.speed=6 if self.superspeedcount>=150: self.superspeed = False self.speed=3 self.superspeedcount=0", "self.left=False self.right=False self.front=False self.back=True elif self.change_y >0: self.left=False self.right=False self.front=True", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck", ">= 21: self.walkCount = 0 screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))", "+= 1 elif 
self.number == 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0)) if", "self.height = height self.bomb_count = bomb_count self.bomb_type = bomb_type self.walls", "self.back: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y))", "+= 1 if self.deathCount >= 200: self.rect.x = 1000 self.gotomenu=True", "pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')] p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'),", "screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif self.number", "+= 1 elif self.number==2: if self.exists and self.spawntimer > 50:", "= self.image.get_rect() self.rect.y = y self.rect.x = x self.number =", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32,", "self.change_y == 0: screen.blit(pygame.image.load('Images/p2front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y)) else:", "self.rect.y): self.doubleexpright = False if i.rect.collidepoint(self.rect.x, self.upcheck): self.expup = False", "== 0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "def changespeed(self, x, y): self.change_x += x self.change_y += y", "+ 16, self.rect.y, 32, 32)) if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y))", "+ 1 >= 21: self.walkCount = 0 
screen.blit(self.p2walkRightshield[self.walkCount // 3],", "self.doubleexpup = False if i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown = False if", "self.walkCount = 0 screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "< 30: if self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x,", "self.superspeed=False self.speed=3 self.shield=False self.megabombs=False self.megabombcount=0 class Wall(pygame.sprite.Sprite): def __init__(self, x,", "pygame.image.load('Images/p1backshield.png')] p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')]", "= [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft =", "self.rect.y)) self.walkCount += 1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1front.png'),", "pygame.image.load('Images/superspeed1.png')] shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation", "pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), 
pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')]", "self.walkCount = 0 screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "(self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x,", "pygame.Surface([24, 28]) self.image.fill((0,0,0)) self.rect = self.image.get_rect() self.rect.y = y self.rect.x", "p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield", "= 0 self.rect.x = x self.rect.y = y self.canmove =", "0 screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif", "1: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0)) if self.alive: if self.front: if self.shield:", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y,", "self.expleft = True self.doubleexpleft = True self.expright = True self.doubleexpright", "pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'), 
pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'),", "shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation =", "self.change_x == 0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "self.width self.upcheck = self.rect.y - 32 self.downcheck = self.rect.y +", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkLeft[self.walkCount //", "p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')] p1walkBack", "self.bomb_count < 120: if self.bomb_type==0: for i in self.walls: if", "self.front = True self.change_x=0 self.change_y=0 self.superspeed=False self.speed=3 self.shield=False self.megabombs=False self.megabombcount=0", "draw(self, screen): if self.number==1: if self.exists and self.spawntimer>50: if self.animationcount", "self.rect.y = y self.rect.x = x self.width = width self.height", "== 0: screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "32)) elif self.bomb_type==1: for i in self.walls: if i.rect.collidepoint(self.leftcheck, self.rect.y):", "if self.change_y == 0: screen.blit(pygame.image.load('Images/p2back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "for block in block_hit_list: if self.change_x > 0: self.rect.right =", "32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16)) 
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck -", "(self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "== 0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "if self.bomb_type==0: for i in self.walls: if i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft =", "pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'),", "pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'), pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')] p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'),", "(self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16,", "p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield", "= False self.right = False self.number = number self.change_x =", "32) image = self.image.convert_alpha() self.rect = self.image.get_rect() self.rect.y = y", "in self.walls: if 
i.rect.collidepoint(self.leftcheck, self.rect.y): self.expleft = False if i.rect.collidepoint(self.leftcheck-32,", "(self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck +", "1 elif self.number == 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0)) if self.alive:", "pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'),", "= False self.speed=3 self.superspeedcount=0 def update(self): if self.canmove: self.rect.x +=", "self.exists=True class bomb(pygame.sprite.Sprite): def __init__(self, x, y, width, height, bomb_count,", "else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 60: if self.bomb_type", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32,", "self.doubleexpup = True self.expdown = True self.doubleexpdown = True self.expboxlist", "elif self.change_x >0: self.left=False self.right=True self.front=False self.back=False block_hit_list = pygame.sprite.spritecollide(self,", "pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'),", "0 screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) 
self.walkCount += 1 elif", "class bomb(pygame.sprite.Sprite): def __init__(self, x, y, width, height, bomb_count, bomb_type):", "pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'),", "pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back2.png'),", "(self.rect.x, self.rect.y)) self.walkCount += 1 elif self.left: if self.shield: if", "if i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright = False if i.rect.collidepoint(self.rect.x,self.upcheck): self.expup = False", "self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16)) self.expboxlist.append(pygame.Rect(self.rect.x,", "screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')] p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1front2shield.png'),", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, 
self.upcheck+16))", "0 screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 if", "pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'),", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y,", "self.rect.y = y self.rect.x = x self.front = True self.back", "[pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')] # Constructor function", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFrontshield[self.walkCount //", "False if i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeft[self.walkCount //", "<0: self.left=True self.right=False self.front=False self.back=False elif self.change_x >0: self.left=False self.right=True", "pygame.Surface([22, 28], pygame.SRCALPHA, 32) image = self.image.convert_alpha() self.rect = self.image.get_rect()", "self.respawntimer=0 self.exists=True self.animationcount=0 def draw(self, screen): if self.number==1: if self.exists", "pygame.image.load('Images2/megabombicon1.png')] def __init__(self, x, y, number): super().__init__() self.image 
= pygame.Surface([22,", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x,", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkBackshield[self.walkCount //", "= 0 screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "self.rect.y)) self.walkCount += 1 elif self.left: if self.shield: if self.change_x", "pygame.image.load('Images/death6.png'), pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'),", "== 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0)) if self.alive: if self.front: if", "self.exists and self.spawntimer>50: if self.animationcount + 1 >= 30: self.animationcount", "number): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32) image =", "screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 60: if self.bomb_type ==", "self.megabombcount=0 class Wall(pygame.sprite.Sprite): def __init__(self, x, y, width, height): super().__init__()", "pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')] p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'),", "= 0 screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "self.bomb_count = bomb_count self.bomb_type = bomb_type self.walls = None self.leftcheck", "self.walkCount = 0 
screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount +=", "(self.rect.x, self.rect.y)) elif self.bomb_count < 90: if self.bomb_type == 0:", "30: if self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y))", "= y self.rect.x = x self.number = number self.spawntimer=0 self.respawntimer=0", "pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'),", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x,", "0 screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else:", "32)) if self.doubleexpup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32))", "1 elif self.right: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1rightshield.png'),", "0 screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif", "screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "pygame.image.load('Images/p1left.png')] p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')]", 
"self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y))", "[pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'),", "self.rect.y)) elif self.bomb_count < 120: if self.bomb_type==0: for i in", "elif self.number == 2: screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0)) if self.alive: if", "self.walkCount += 1 elif self.right: if self.shield: if self.change_x ==", "pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')] p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2back2shield.png'),", "y self.rect.x = x self.number = number self.spawntimer=0 self.respawntimer=0 self.exists=True", "p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack", "p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'), pygame.image.load('Images/p2front1shield.png'), 
pygame.image.load('Images/p2frontshield.png')] p2walkBackshield", "pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'), pygame.image.load('Images/death9.png'), pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'), pygame.image.load('Images/death12.png'), pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'), pygame.image.load('Images/death15.png'), pygame.image.load('Images/death16.png'),", "= False if i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))", "1 >= 21: self.walkCount = 0 screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x,", "21: self.walkCount = 0 screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "0: screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", ">= 21: self.walkCount = 0 screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))", "= True self.expup = True self.doubleexpup = True self.expdown =", "= self.rect.x + self.width self.upcheck = self.rect.y - 32 self.downcheck", "self.leftcheck = self.rect.x - 32 self.rightcheck = self.rect.x + self.width", "self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16,", "self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck", "self.alive: if self.front: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1frontshield.png'),", "0 screen.blit(self.megabombanimation[self.animationcount 
// 5], (self.rect.x, self.rect.y)) self.animationcount += 1 def", "screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount += 1 def reset(self):", "= True self.deathCount = 0 self.rect.x = x self.rect.y =", "elif self.back: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x,", "self.change_y == 0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32,", "True self.doubleexpright = True self.expup = True self.doubleexpup = True", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkRightshield[self.walkCount //", "(self.rightcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32)) if self.expup: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x,", "self.image.get_rect() self.rect.y = y self.rect.x = x self.width = width", "if self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y)) elif", "self.rect.y, 32, 32)) if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y,", "self.walls = None self.leftcheck = self.rect.x - 32 self.rightcheck =", "width, height): super().__init__() self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32) image =", "pygame.image.load('Images/death17.png'), 
pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'), pygame.image.load('Images/death20.png')] p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'),", "screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y)) elif self.bomb_count <", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32)) if self.expright:", "+ 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32)) if", "screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'),", "= self.rect.y - 32 self.downcheck = self.rect.y + self.height self.expleft", "None self.leftcheck = self.rect.x - 32 self.rightcheck = self.rect.x +", "elif self.right: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1rightshield.png'), (self.rect.x,", "self.alive: if self.front: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2frontshield.png'),", "= block.rect.top else: self.rect.top = block.rect.bottom def draw(self, screen): if", "screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0)) if self.alive: if self.front: if self.shield: if", "self.rect.y)) self.animationcount += 1 else: if self.exists and self.spawntimer >", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRightshield[self.walkCount //", "True self.doubleexpup = True self.expdown = True self.doubleexpdown = True", "0 
screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 elif", "= 0 def changespeed(self, x, y): self.change_x += x self.change_y", "self.expboxlist = [] def draw(self, screen): if self.bomb_count < 30:", "self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2frontshield.png'), (self.rect.x, self.rect.y)) else: if", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRight[self.walkCount //", "(self.rect.x, self.rect.y)) self.deathCount += 1 if self.deathCount >= 200: self.rect.x", "function def __init__(self, x, y, number): super().__init__() self.image = pygame.Surface([24,", "screen): if self.number==1: if self.exists and self.spawntimer>50: if self.animationcount +", "pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'), pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')] p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'),", "= 0 self.walkCount = 0 self.walls = None self.alive =", "= x self.front = True self.back = False self.left =", "y): self.change_x += x self.change_y += y if self.superspeed and", "== 0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'),", "= self.rect.x - 32 self.rightcheck = self.rect.x + self.width self.upcheck", "self.walkCount = 0 screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y)) 
self.walkCount +=", "pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'), pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')] p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'),", "16, self.rect.y, 32, 32)) if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32,", "p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'), pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')] p1walkRight", "self.change_y == 0: screen.blit(pygame.image.load('Images/p2backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'),", "(self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16,", "self.alive = True self.canmove = True self.deathCount = 0 self.gotomenu=False", "self.change_y == 0: screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "- 16, 32, 32)) if self.doubleexpdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32)) 
self.expboxlist.append(pygame.Rect(self.rect.x,", "i.rect.collidepoint(self.rect.x, self.downcheck): self.expdown = False if i.rect.collidepoint(self.rect.x, self.downcheck+32): self.doubleexpdown =", "self.bomb_type==0: screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y)) elif self.bomb_count", "= [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'), pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')] p2walkFrontshield =", "self.change_x >0: self.left=False self.right=True self.front=False self.back=False block_hit_list = pygame.sprite.spritecollide(self, self.walls,", "elif self.bomb_count < 60: if self.bomb_type == 0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x,", ">= 30: self.animationcount = 0 screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y))", "def reset(self,x,y): self.gotomenu = False self.alive = True self.deathCount =", "self.number = number self.change_x = 0 self.change_y = 0 self.walkCount", "1 elif self.left: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2leftshield.png'),", "self.rect.y)) self.animationcount += 1 elif self.number==2: if self.exists and self.spawntimer", "+= 1 elif self.back: if self.shield: if self.change_y == 0:", "height, bomb_count, bomb_type): super().__init__() self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)", "0: screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y)) elif self.bomb_count", "self.shield: if self.change_x == 0: 
screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y)) else: if", "self.speed=6 if self.superspeedcount>=150: self.superspeed = False self.speed=3 self.superspeedcount=0 def update(self):", "(self.rect.x, self.rect.y)) self.walkCount += 1 else: if self.change_y == 0:", "self.expright = False if i.rect.collidepoint(self.rightcheck+32, self.rect.y): self.doubleexpright = False if", "False self.speed=3 self.superspeedcount=0 def update(self): if self.canmove: self.rect.x += self.change_x", "True self.doubleexpleft = True self.expright = True self.doubleexpright = True", "self.walkCount += 1 else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1right.png'), (self.rect.x,", "< 200: screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y)) self.deathCount += 1", "0: screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >=", "21: self.walkCount = 0 screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkRight[self.walkCount // 3],", "self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkBack[self.walkCount //", "21: self.walkCount = 0 screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "0 screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else:", "if self.animationcount + 1 >= 30: self.animationcount = 0 screen.blit(self.megabombanimation[self.animationcount", "self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32))", "else: self.rect.top = block.rect.bottom def draw(self, screen): if self.number ==", "self.expright = True self.doubleexpright = True self.expup = True self.doubleexpup", "(self.rect.x, 
self.upcheck+16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32)) if self.expdown: screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x,", "[pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')] megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'),", "16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32)) if self.doubleexpright:", "if self.change_y == 0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount", "0 screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else:", "self.change_x > 0: self.rect.right = block.rect.left else: self.rect.left = block.rect.right", "y self.rect.x = x class powerup(pygame.sprite.Sprite): superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'),", "0 def changespeed(self, x, y): self.change_x += x self.change_y +=", "self.rect.y): self.doubleexpleft = False if i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright = False", "32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32)) if self.expdown:", "screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y)) elif self.bomb_count < 90: if self.bomb_type ==", "1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y)) else:", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32)) 
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y,", "False self.number = number self.change_x = 0 self.change_y = 0", "self.left: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y))", "30: self.animationcount = 0 screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount", "screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32,", "x, y, width, height, bomb_count, bomb_type): super().__init__() self.image = pygame.Surface([22,", "= [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'), pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')] p1walkRightshield =", "self.rect.x = x self.number = number self.spawntimer=0 self.respawntimer=0 self.exists=True self.animationcount=0", "21: self.walkCount = 0 screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount", "(self.rect.x, self.rect.y)) elif self.bomb_count < 60: if self.bomb_type == 0:", "screen.blit(pygame.image.load('Images/p1back.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", ">= 21: self.walkCount = 0 screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))", "False if i.rect.collidepoint(self.rightcheck, self.rect.y): self.expright = False if i.rect.collidepoint(self.rightcheck+32, self.rect.y):", "self.rect = self.image.get_rect() self.rect.y = y self.rect.x = x 
self.front", "(self.rect.x, self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32)) elif self.bomb_type==1: for i", "+ 16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32)) if self.doubleexpup:", ">= 21: self.walkCount = 0 screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))", "pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'), pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')] p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front2.png'),", "pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')] p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right2.png'), pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'),", "self.walls = None self.alive = True self.canmove = True self.deathCount", "self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32)) if self.expright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rightcheck,", "1 elif self.back: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p2backshield.png'),", "= self.image.get_rect() self.rect.y = y self.rect.x = x self.width =", "1 else: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1front.png'), (self.rect.x, self.rect.y)) else:", "screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1 else: if", "if self.canmove: self.rect.x += self.change_x if self.change_x <0: self.left=True self.right=False", "self.front=True self.back=False block_hit_list = 
pygame.sprite.spritecollide(self, self.walls, False) for block in", "screen.blit(pygame.image.load('Images/p1backshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "self.speed=3 self.superspeedcount=0 def update(self): if self.canmove: self.rect.x += self.change_x if", "i.rect.collidepoint(self.rect.x,self.downcheck): self.expdown = False screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y)) self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32,", "+ 1 >= 30: self.animationcount = 0 screen.blit(self.shieldanimation[self.animationcount // 5],", "== 0: screen.blit(pygame.image.load('Images/p2rightshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1", "0: screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y)) else: screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y)) elif self.bomb_count", "= self.image.convert_alpha() self.rect = self.image.get_rect() self.rect.y = y self.rect.x =", "+ 1 >= 21: self.walkCount = 0 screen.blit(self.p2walkRight[self.walkCount // 3],", "screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16))", "self.change_x == 0: screen.blit(pygame.image.load('Images/p2right.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "- 16, self.rect.y, 32, 32)) if self.doubleexpright: screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y))", "self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16,", "i.rect.collidepoint(self.leftcheck,self.rect.y): self.expleft = False if 
i.rect.collidepoint(self.rightcheck,self.rect.y): self.expright = False if", "30: self.animationcount = 0 screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y)) self.animationcount", "if self.animationcount + 1 >= 30: self.animationcount = 0 screen.blit(self.superspeedanimation[self.animationcount", "screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32,", "= width self.height = height self.bomb_count = bomb_count self.bomb_type =", "screen.blit(pygame.image.load('Images/p2leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount + 1 >= 21:", "(self.rect.x, self.rect.y)) self.walkCount += 1 if self.alive == False and", "self.change_x == 0: screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y)) else: if self.walkCount +", "self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32)) if self.doubleexpleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32,", "self.downcheck-16)) self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32)) elif self.bomb_type==1: for i in", "else: if self.change_x == 0: screen.blit(pygame.image.load('Images/p2left.png'), (self.rect.x, self.rect.y)) else: if", "self.left: if self.shield: if self.change_x == 0: screen.blit(pygame.image.load('Images/p1leftshield.png'), (self.rect.x, self.rect.y))", "self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32)) if self.expleft: screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck,", "= 0 screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y)) self.walkCount += 1", "if self.front: if self.shield: if self.change_y == 0: screen.blit(pygame.image.load('Images/p1frontshield.png'), (self.rect.x,", "self.front=False self.back=False elif 
self.change_x >0: self.left=False self.right=True self.front=False self.back=False block_hit_list", "pygame.image.load('Images/p1back2.png'), pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')] p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'),", "if self.walkCount + 1 >= 21: self.walkCount = 0 screen.blit(self.p1walkFrontshield[self.walkCount", "= True self.expright = True self.doubleexpright = True self.expup =", "pygame.image.load('Images/p1back2shield.png'), pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')] p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left2.png'), pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'),", "self.downcheck-16, 32, 32)) elif self.bomb_type==1: for i in self.walls: if", "self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32)) screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y,", "= True self.front = True self.change_x=0 self.change_y=0 self.superspeed=False self.speed=3 self.shield=False", "(self.leftcheck + 16, self.rect.y)) self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32))" ]
[ "self.modules[index_A] = temp_A self.modules[index_B] = temp_B self.module_map[module_A] = index_B self.module_map[module_B]", "are replaced) \"\"\" temp = rel_path.split('/', 1)[-1] if \"$(INSTALL)\" in", "of ADCore \"\"\" return self.get_module_by_name('ADCORE').version def swap_module_positions(self, module_A, module_B): \"\"\"Swaps", "Error message if applicable, None otherwise \"\"\" valid = True", "n, n, n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org, \"ADProsilica\", n,", "temp) elif \"$(AREA_DETECTOR)\" in rel_path and self.ad_path != None: return", "folder of installSynApps modules : List of InsallModule list of", "name can be found in the list of accounted for", "install location of EPICS area detector motor_path : str abs", "a new injector file to the install_config object Parameters ----------", "R=Pva1:\\n' common_plugins_str += 'NDPvaConfigure(\"PVA1\", $(QSIZE), 0, \"$(PORT)\", 0, $(PREFIX)Pva1:Image, 0,", "temp) elif \"$(\" in rel_path: macro_part = rel_path.split(')')[0] rel_to =", "self.get_module_build_index(module_B) if index_A >= 0 and index_B >= 0: temp_A", "that will be installed base_path : str abs path to", "Returns ------- obj - InstallModule Return matching module, or None", "valid, false otherwise str Error message if applicable, None otherwise", "False message = 'Install location and parent directory do not", "out = [] for module in self.modules: if module.build ==", "be abstracted into a simpler data structure (since its used", "install config self.modules = [] # Dict that maps module", "or None if not found. 
\"\"\" if name in self.module_map.keys():", "target def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True): config = InstallConfiguration(target_install_loc, None) y", "str -> int Dictionary storing relation of module names to", "ADCore \"\"\" return self.get_module_by_name('ADCORE').version def swap_module_positions(self, module_A, module_B): \"\"\"Swaps build", "n, n, n)) config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org, \"ADPilatus\", n,", "gu, ad_org, \"ADAravis\", n, n, n)) config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu,", "n, n, n)) config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org, \"ADAndor3\", n,", "n)) config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\", gu, ad_org, \"ADMythen\", n, n, n))", "list of module names that are set to build \"\"\"", "is then used throughout the build process. InjectorFile objects are", "be injected into configuration files prior to builds. 
\"\"\" import", "track if module.name == \"EPICS_BASE\": self.base_path = module.abs_path elif module.name", "of module names to build index injector_files : list of", "convert_path_abs(self, rel_path): \"\"\"Function that converts a given modules relative path", "y, n)) config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu, ad_org, \"ADSupport\", y, y,", "not exist' elif not os.access(target, os.W_OK | os.X_OK): valid =", "path for the given module Returns ------- str The absolute", "config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\", gu, ad_org, \"ADMythen\", n, n, n)) config.add_module(IM(\"ADURL\",", "text that need to be injected into configuration files prior", "and self.ad_path != None: return installSynApps.join_path(self.ad_path, temp) elif \"$(MOTOR)\" in", "syn_org, \"utils\", y, y, n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\", wu, seq_rel,", "represents an Install Configuration for installSynApps It stores the top", "build time. Used to add to commonPlugins, commonPlugin_settings, etc. TODO:", "print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string()) def get_printable_string(self): \"\"\"Function that gets a toString", "index_A = self.get_module_build_index(module_A) index_B = self.get_module_build_index(module_B) if index_A >= 0", "Return matching module, or None if not found. \"\"\" if", "into configuration files prior to builds. 
\"\"\" import os import", "the modules that will be installed base_path : str abs", "config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org, \"ADSimDetector\", n, n, n)) config.add_module(IM(\"ADPILATUS\",", "def add_module(self, module): \"\"\"Function that adds a module to the", "---------- fp = None : file pointer Optional pointer to", ": str The target location file into which contents will", "that module's absolute path Parameters ---------- rel_path : str The", "dependencies are built before lower level packages Parameters ---------- module_A", "n, n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org, \"ADProsilica\", n, n,", "= out + module.get_printable_string() return out def get_module_names_list(self): \"\"\"Function that", "The contents of the file target : str The target", "area detector motor_path : str abs path to install location", "modules : List of InsallModule list of InstallModule objects representing", "installSynApps It stores the top level install_location, the path to", "= module.abs_path elif module.name == \"SUPPORT\": self.support_path = module.abs_path elif", "\"2.8.10\", \"$(SUPPORT)/stream\", gu, psi_org, \"StreamDevice\", y, y, y)) # AreaDetector", "in self.modules: if module.clone == 'YES': out = out +", "any OS specific configurations, and the actual list of modules", "macro_part.split('(')[1] rel_to_module = self.get_module_by_name(rel_to) if rel_to_module is not None: return", "configure and output locations self.path_to_configure = path_to_configure self.install_location = os.path.abspath(install_location)", "n, n, n)) config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org, \"ADSimDetector\", n,", "temp) return rel_path def print_installation_info(self, fp = None): \"\"\"Function that", "with_pva: autosave_str += 'file \"NDPva_settings.req\", P=$(P), R=Pva1:\\n' common_plugins_str += 
'NDPvaConfigure(\"PVA1\",", "None def is_install_valid(self): \"\"\"Function that checks if given install location", "out = out + module.get_printable_string() return out def get_module_names_list(self): \"\"\"Function", "macro_list): \"\"\"Function that adds macro-value pairs to a list of", "if this is enabled\\n' \\ 'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd')", "module in self.modules: if module.build == 'YES': out.append(module.name) return out", "config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org, \"ADMerlin\", n, n, n)) config.add_module(IM(\"ADARAVIS\",", "return self.modules def get_module_by_name(self, name): \"\"\"Function that returns install module", "installation path for the given module Returns ------- str The", "y, y, y)) # AreaDetector and commonly used drivers config.add_module(IM(\"AREA_DETECTOR\",", "install_location, path_to_configure): \"\"\"Constructor for the InstallConfiguration object \"\"\" # Paths", "module.abs_path = self.convert_path_abs(module.rel_path) # Key paths to track if module.name", "\"$(SUPPORT)/autosave\", gu, mod_org, \"autosave\", y, y, y)) config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\",", "elif \"$(EXTENSIONS)\" in rel_path and self.extensions_path != None: return installSynApps.join_path(self.extensions_path,", "object Returns ------- bool True if install location is valid,", "mod_org, \"ipac\", y, y, y)) config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\", gu, mod_org,", "str abs path to install location of EPICS area detector", "module.abs_path elif module.name == \"AREA_DETECTOR\": self.ad_path = module.abs_path elif module.name", "autosave_str = 'file \"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n' if with_pva: autosave_str +=", "for relative path correction self.base_path = None self.support_path = None", "n)) 
config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org, \"ADPilatus\", n, n, n))", "modules to track, sets the appropriate variables. Also, add the", "base support_path : str abs path to install location of", "Attributes ---------- path_to_configure : str path to the configure dir", "of macro-value pairs enforced at build time \"\"\" def __init__(self,", "ad_org, \"ADPointGrey\", n, n, n)) config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu, ad_org,", "that gets a toString for an InstallConfigurations Returns ------- str", "None def get_module_build_index(self, name): \"\"\"Function that returns the index in", "n, n, n)) config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\", gu, ad_org, \"ADAravis\", n,", "to an external log file \"\"\" if fp == None:", "\"autosave\", y, y, y)) config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\", gu, mod_org, \"busy\",", "do not exist' elif not os.access(target, os.W_OK | os.X_OK): valid", "EPICS motor module_map : dict of str -> int Dictionary", "\"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org, \"ADPerkinElmer\", n, n, n)) config.add_module(IM(\"ADGENICAM\", \"master\",", "\"ipUnidig\", y, y, y)) # Some modules that are commonly", "a given modules relative path to an absolute path If", "locations self.path_to_configure = path_to_configure self.install_location = os.path.abspath(install_location) # Modules loaded", "psi_org, \"StreamDevice\", y, y, y)) # AreaDetector and commonly used", "\"ADGenICam\", n, n, n)) config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org, \"ADAndor3\",", "self.ad_path = None self.motor_path = None self.extensions_path = None def", "= os.path.dirname(self.install_location) if not os.path.exists(target): valid = False message =", "'https://github.com/epics-base/' syn_org = 'https://github.com/EPICS-synApps/' mod_org = 'https://github.com/epics-modules/' ad_org = 
'https://github.com/areaDetector/'", "macro-value pairs enforced at build time \"\"\" def __init__(self, install_location,", "y, y)) config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu, ad_org, \"ADCore\", y, y,", "self.module_map[name] else: return -1 def get_core_version(self): \"\"\"Funciton that returns selected", "\"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu, ad_org, \"ADDexela\", n, n, n)) config.add_module(IM(\"ADMYTHEN\", \"master\",", "= 'https://github.com/areaDetector/' seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org = 'https://github.com/paulscherrerinstitute/' # Add", "config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\", gu, ad_org, \"ADAravis\", n, n, n)) config.add_module(IM(\"ADEIGER\",", "accounted for modules, replace it with that module's absolute path", "= [] for module in self.modules: if module.build == 'YES':", "gu, syn_org, \"utils\", y, y, n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\", wu,", "n, n, n)) config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\", gu, ad_org, \"ADURL\", n,", "anyway) Attributes ---------- path_to_configure : str path to the configure", "None) y = 'YES' n = 'NO' gu = 'GIT_URL'", "mod_org, \"iocStats\", y, y, y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\", gu, mod_org,", "the configuration Returns ------- List self.modules - list of modules", "module.name == \"EXTENSIONS\": self.extensions_path = module.abs_path self.module_map[module.name] = len(self.modules) self.modules.append(module)", "= 'Install location and parent directory do not exist' elif", "module name to index in module list for easier searching.", "n, n, n)) config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org, \"ADMerlin\", n,", "InjectorFile objects are used for representing text that need to", "y, n)) config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\", gu, syn_org, 
\"utils\", y, y,", "\"$(SUPPORT)/ipac\", gu, mod_org, \"ipac\", y, y, y)) config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\",", "will be injected. \"\"\" def __init__(self, path_to_configure, name, contents, target):", ": str Module name Returns ------- obj - InstallModule Return", "Returns ------- bool True if install location is valid, false", "config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu, ad_org, \"ADCore\", y, y, y)) config.add_module(IM(\"ADPERKINELMER\",", "'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req') if update_versions:", "to the three install location paths used for relative path", "path to install location of EPICS area detector motor_path :", "self.support_path = module.abs_path elif module.name == \"AREA_DETECTOR\": self.ad_path = module.abs_path", "y, y)) config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\", gu, mod_org, \"motor\", y, y,", "'https://github.com/EPICS-synApps/' mod_org = 'https://github.com/epics-modules/' ad_org = 'https://github.com/areaDetector/' seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/'", "= 'file \"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n' if with_pva: autosave_str += 'file", "level packages. 
Parameters ---------- name : str Module name Returns", "config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\", gu, mod_org, \"sscan\", y, y, y)) config.add_module(IM(\"IPUNIDIG\",", "module to the map of modules which will keep track", "path_to_configure): \"\"\"Constructor for the InstallConfiguration object \"\"\" # Paths to", "elif module.name == \"MOTOR\": self.motor_path = module.abs_path elif module.name ==", "module_A, module_B): \"\"\"Swaps build order of modules Used to ensure", "n)) config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu, ad_org, \"ADAndor\", n, n, n))", "data that needs to be appended to target files at", "ad_org, \"areaDetector\", y, y, n)) config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu, ad_org,", ": str Name of second module \"\"\" index_A = self.get_module_build_index(module_A)", "-> int Dictionary storing relation of module names to build", "gu, ad_org, \"ADVimba\", n, n, n)) config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu,", "lower level packages. 
Parameters ---------- name : str Module name", "of the three key modules to track, sets the appropriate", "contents, target): \"\"\"Constructor of InjectorFile class \"\"\" self.path_to_configure = path_to_configure", "__init__(self, path_to_configure, name, contents, target): \"\"\"Constructor of InjectorFile class \"\"\"", "in self.modules: if module.build == 'YES': out.append(module.name) return out class", "= contents self.target = target def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True): config", "index_A >= 0 and index_B >= 0: temp_A = self.get_module_by_name(module_B)", "self.module_map[module_A] = index_B self.module_map[module_B] = index_A def convert_path_abs(self, rel_path): \"\"\"Function", "\"ADURL\", n, n, n)) common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str =", "import installSynApps from installSynApps.data_model.install_module import InstallModule as IM class InstallConfiguration:", "in the build order for the module Used for ensuring", "path to configure folder of installSynApps modules : List of", "valid InstallModule, then sets the config, and abs path, then", "self.convert_path_abs(module.rel_path) # Key paths to track if module.name == \"EPICS_BASE\":", "EPICS base support_path : str abs path to install location", "# Key paths to track if module.name == \"EPICS_BASE\": self.base_path", "config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org, \"ADAndor3\", n, n, n)) config.add_module(IM(\"ADPROSILICA\",", "of installSynApps modules : List of InsallModule list of InstallModule", "mod_org, \"sscan\", y, y, y)) config.add_module(IM(\"IPUNIDIG\", \"R2-11\", \"$(SUPPORT)/ipUnidig\", gu, mod_org,", "and the actual list of modules that will be installed.", "list of list of str list of macro-value pairs enforced", "appropriate variables. 
Also, add the module to the map of", "\"R5-10\", \"$(SUPPORT)/autosave\", gu, mod_org, \"autosave\", y, y, y)) config.add_module(IM(\"BUSY\", \"R1-7-2\",", "return reference to given module object. Parameters ---------- name :", "The core Data representation for installSynApps. An InstallConfiguration object is", "of str -> int Dictionary storing relation of module names", "for modules, replace it with that module's absolute path Parameters", "first module module_B : str Name of second module \"\"\"", "gu, syn_org, \"configure\", y, y, n)) config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\", gu,", "self.get_module_by_name('ADCORE').version def swap_module_positions(self, module_A, module_B): \"\"\"Swaps build order of modules", "rel_to_module = self.get_module_by_name(rel_to) if rel_to_module is not None: return installSynApps.join_path(rel_to_module.abs_path,", "config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\", gu, ad_org, \"ADURL\", n, n, n)) common_plugins_str", "installSynApps.join_path(self.install_location, temp) elif \"$(EPICS_BASE)\" in rel_path and self.base_path != None:", "self.extensions_path = None def is_install_valid(self): \"\"\"Function that checks if given", "modules Used to ensure dependencies are built before lower level", "data structure (since its used as a struct anyway) Attributes", "path to install location of EPICS support modules ad_path :", "= len(self.modules) self.modules.append(module) def add_injector_file(self, name, contents, target): \"\"\"Function that", "n, n, n)) config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu, ad_org, \"ADAndor\", n,", "n, n)) config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org, \"ADGenICam\", n, n,", "Config is saved at {}\\n\".format(self.path_to_configure) for module in self.modules: if", "in rel_path and self.base_path != None: return installSynApps.join_path(self.base_path, temp) elif", "that checks if 
given install location is valid Parameters ----------", "installSynApps.join_path(self.support_path, temp) elif \"$(AREA_DETECTOR)\" in rel_path and self.ad_path != None:", "os.access(target, os.W_OK | os.X_OK): valid = False message = 'Permission", "def add_injector_file(self, name, contents, target): \"\"\"Function that adds a new", "used for representing text that need to be injected into", "location of EPICS support modules ad_path : str abs path", "self.base_path = None self.support_path = None self.ad_path = None self.motor_path", "None: return installSynApps.join_path(self.support_path, temp) elif \"$(AREA_DETECTOR)\" in rel_path and self.ad_path", "path correction self.base_path = None self.support_path = None self.ad_path =", "message = 'Permission Error: {}'.format(target) return valid, message def add_module(self,", "+ \"Install Location = {}\\n\".format(self.install_location) out = out + \"This", "modules relative path to an absolute path If the macro", "First checks if parameter is a valid InstallModule, then sets", "str The target location file into which contents will be", "\"asyn\", y, y, y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\", gu, mod_org, \"autosave\",", "\"\"\" new_injector = InjectorFile(self.path_to_configure, name, contents, target) self.injector_files.append(new_injector) def add_macros(self,", "index injector_files : list of InjectorFile list of injector files", "be injected. 
\"\"\" new_injector = InjectorFile(self.path_to_configure, name, contents, target) self.injector_files.append(new_injector)", "rel_path and self.base_path != None: return installSynApps.join_path(self.base_path, temp) elif \"$(SUPPORT)\"", ": dict of str -> int Dictionary storing relation of", "in rel_path and self.ad_path != None: return installSynApps.join_path(self.ad_path, temp) elif", "Returns ------- str The absolute installation path for the module.", "\\ 'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req') if", "each module is in in the list/build order Parameters ----------", "None target = self.install_location if not os.path.exists(target): target = os.path.dirname(self.install_location)", "to a list of macros Parameters ---------- macro_list : list", "names that are set to build \"\"\" out = []", "str path to top level install location path_to_configure : str", "core Data representation for installSynApps. 
An InstallConfiguration object is parsed", "is valid Parameters ---------- self : InstallConfiguration Self object Returns", "out def get_module_names_list(self): \"\"\"Function that gets list of modules being", "\"EXTENSIONS\": self.extensions_path = module.abs_path self.module_map[module.name] = len(self.modules) self.modules.append(module) def add_injector_file(self,", "build order if found, otherwise -1 \"\"\" if name in", "n)) config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu, ad_org, \"ADEiger\", n, n, n))", "\"ADCore\", y, y, y)) config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org, \"ADPerkinElmer\",", "returns selected version of ADCore \"\"\" return self.get_module_by_name('ADCORE').version def swap_module_positions(self,", "update_versions=False, with_pva=True): config = InstallConfiguration(target_install_loc, None) y = 'YES' n", "config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\", gu, mod_org, \"iocStats\", y, y, y)) config.add_module(IM(\"SSCAN\",", "parent directory do not exist' elif not os.access(target, os.W_OK |", "add the module to the map of modules which will", "n, n, n)) config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\", gu, ad_org, \"ADMythen\", n,", "that represents an Install Configuration for installSynApps It stores the", "\"$(AREA_DETECTOR)\" in rel_path and self.ad_path != None: return installSynApps.join_path(self.ad_path, temp)", "module : InstallModule new installation module being added. 
\"\"\" if", "psi_org = 'https://github.com/paulscherrerinstitute/' # Add core modules that will generally", "to the configure dir that houses this injector file name", "str abs path to install location of EPICS base support_path", "\"$(SUPPORT)/motor\", gu, mod_org, \"motor\", y, y, y)) config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\",", "0: temp_A = self.get_module_by_name(module_B) temp_B = self.get_module_by_name(module_A) self.modules[index_A] = temp_A", "\"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org, \"ADSimDetector\", n, n, n)) config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\",", "of list of str list of macro-value pairs enforced at", "\"$(SUPPORT)/calc\", gu, mod_org, \"calc\", y, y, y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\",", "The target location file into which contents will be injected.", "= {} self.injector_files = [] self.build_flags = [] # Paths", "loaded into install config self.modules = [] # Dict that", "at {}\\n\".format(self.path_to_configure) for module in self.modules: if module.clone == 'YES':", "matching module, or None if not found. 
\"\"\" if name", "prints installation info Prints list of all modules including clone/build/package", "\"epics-base\", y, y, y)) config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\", gu, syn_org, \"support\",", "y, y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\", gu, mod_org, \"sscan\", y, y,", "if module.build == 'YES': out.append(module.name) return out class InjectorFile: \"\"\"Class", "swap_module_positions(self, module_A, module_B): \"\"\"Swaps build order of modules Used to", "\"AREA_DETECTOR\": self.ad_path = module.abs_path elif module.name == \"MOTOR\": self.motor_path =", "name): \"\"\"Function that returns the index in the build order", "= self.get_module_build_index(module_A) index_B = self.get_module_build_index(module_B) if index_A >= 0 and", "the install configuration \"\"\" out = \"--------------------------------\\n\" out = out", "probably be abstracted into a simpler data structure (since its", "\"\"\" self.path_to_configure = path_to_configure self.name = name self.contents = contents", "not os.path.exists(target): target = os.path.dirname(self.install_location) if not os.path.exists(target): valid =", "keep track of which position each module is in in", "\"ipac\", y, y, y)) config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\", gu, mod_org, \"asyn\",", "in rel_path and self.extensions_path != None: return installSynApps.join_path(self.extensions_path, temp) elif", "that maps module name to index in module list for", "given modules relative path to an absolute path If the", "files loaded by install configuration build_flags : list of list", "\"R6-1\", \"$(INSTALL)/support\", gu, syn_org, \"support\", y, y, n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\",", "top level install_location, the path to the configuration files, any", "abs path, then if it is one of the three", "pairs to a list of macros Parameters ---------- macro_list :", "in this install configuration \"\"\" return self.modules def 
get_module_by_name(self, name):", "\"\"\"Function that gets the list of modules in the configuration", "config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu, ad_org, \"ADEiger\", n, n, n)) config.add_module(IM(\"ADVIMBA\",", "name : str name of the file contents : str", "gu, ad_org, \"ADAndor\", n, n, n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu,", "modules including clone/build/package information Parameters ---------- fp = None :", "if with_pva: autosave_str += 'file \"NDPva_settings.req\", P=$(P), R=Pva1:\\n' common_plugins_str +=", "0)\\n' \\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '# Must start PVA", "are used to represent data that needs to be appended", "\"$(SUPPORT)/utils\", gu, syn_org, \"utils\", y, y, n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\",", "configurations, and the actual list of modules that will be", "object given module name Uses module name as a key", "\"$(AREA_DETECTOR)/ADAndor\", gu, ad_org, \"ADAndor\", n, n, n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\",", "exist' elif not os.access(target, os.W_OK | os.X_OK): valid = False", "installation module being added. \"\"\" if isinstance(module, IM): # Updates", "which position each module is in in the list/build order", "Name of second module \"\"\" index_A = self.get_module_build_index(module_A) index_B =", "to append \"\"\" self.build_flags = self.build_flags + macro_list def get_module_list(self):", "for representing text that need to be injected into configuration", "needs to be appended to target files at build time.", "the config, and abs path, then if it is one", "install_location, the path to the configuration files, any OS specific", "gu, ad_org, \"ADAndor3\", n, n, n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu,", "for installSynApps. 
An InstallConfiguration object is parsed from a configuration,", "= macro_part.split('(')[1] rel_to_module = self.get_module_by_name(rel_to) if rel_to_module is not None:", "# Paths to the three install location paths used for", "class InstallConfiguration: \"\"\" Class that represents an Install Configuration for", "contents, target): \"\"\"Function that adds a new injector file to", "= module.abs_path self.module_map[module.name] = len(self.modules) self.modules.append(module) def add_injector_file(self, name, contents,", "gu, mod_org, \"xspress3\", y, y, y)) config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\", gu,", "of modules to install in this install configuration \"\"\" return", "build order of modules Used to ensure dependencies are built", "!= None: return installSynApps.join_path(self.motor_path, temp) elif \"$(EXTENSIONS)\" in rel_path and", "= path_to_configure self.install_location = os.path.abspath(install_location) # Modules loaded into install", "gu, ad_org, \"ADPerkinElmer\", n, n, n)) config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu,", "output locations self.path_to_configure = path_to_configure self.install_location = os.path.abspath(install_location) # Modules", "the module Used for ensuring dependencies are built before lower", "class InjectorFile: \"\"\"Class that represents an injector file and stores", "of str list of macro-value pairs enforced at build time", "representing the modules that will be installed base_path : str", "{}\\n\".format(self.path_to_configure) for module in self.modules: if module.clone == 'YES': out", "---------- rel_path : str The relative installation path for the", "module to the InstallConfiguration module list First checks if parameter", ": list of [str, str] list of new macros to", "of str list of module names that are set to", "y)) config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu, ad_org, \"ADCore\", y, y, y))", "file name : str name of the 
file contents :", "self.base_path = module.abs_path elif module.name == \"SUPPORT\": self.support_path = module.abs_path", "injected. \"\"\" new_injector = InjectorFile(self.path_to_configure, name, contents, target) self.injector_files.append(new_injector) def", "module.abs_path elif module.name == \"EXTENSIONS\": self.extensions_path = module.abs_path self.module_map[module.name] =", "mod_org, \"autosave\", y, y, y)) config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\", gu, mod_org,", "\"ADPilatus\", n, n, n)) config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org, \"ADMerlin\",", "gu, ad_org, \"ADEiger\", n, n, n)) config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\", gu,", "config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu, ad_org, \"ADSupport\", y, y, y)) config.add_module(IM(\"ADCORE\",", "\"$(SUPPORT)\" in rel_path and self.support_path != None: return installSynApps.join_path(self.support_path, temp)", "mod_org = 'https://github.com/epics-modules/' ad_org = 'https://github.com/areaDetector/' seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org", "of InjectorFile list of injector files loaded by install configuration", "------- list of str list of module names that are", "Used for ensuring dependencies are built before lower level packages.", "\"\"\" temp = rel_path.split('/', 1)[-1] if \"$(INSTALL)\" in rel_path and", "index_B = self.get_module_build_index(module_B) if index_A >= 0 and index_B >=", "valid Parameters ---------- self : InstallConfiguration Self object Returns -------", "y, y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\", gu, mod_org, \"autosave\", y, y,", "is enabled\\n' \\ 'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG', autosave_str,", "\"\"\"Function 
that adds a module to the InstallConfiguration module list", "injector file to the install_config object Parameters ---------- name :", "maps module name to index in module list for easier", "= 'https://github.com/paulscherrerinstitute/' # Add core modules that will generally always", "of EPICS base support_path : str abs path to install", "\"ADAndor3\", n, n, n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org, \"ADProsilica\",", "simpler data structure (since its used as a struct anyway)", "\"R3-7-3\", \"$(SUPPORT)/calc\", gu, mod_org, \"calc\", y, y, y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\",", "key in a dictionary to return reference to given module", "installSynApps.join_path(self.base_path, temp) elif \"$(SUPPORT)\" in rel_path and self.support_path != None:", "\"\"\"Constructor of InjectorFile class \"\"\" self.path_to_configure = path_to_configure self.name =", "Returns ------- list of str list of module names that", "n)) config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org, \"ADMerlin\", n, n, n))", "be injected. \"\"\" def __init__(self, path_to_configure, name, contents, target): \"\"\"Constructor", "\"R7-1\", \"$(SUPPORT)/motor\", gu, mod_org, \"motor\", y, y, y)) config.add_module(IM(\"QUADEM\", \"R9-3\",", "= self.get_module_build_index(module_B) if index_A >= 0 and index_B >= 0:", "path Parameters ---------- rel_path : str The relative installation path", "[str, str] list of new macros to append \"\"\" self.build_flags", "self.support_path != None: return installSynApps.join_path(self.support_path, temp) elif \"$(AREA_DETECTOR)\" in rel_path", "motor module_map : dict of str -> int Dictionary storing", "the appropriate variables. 
Also, add the module to the map", "module.abs_path elif module.name == \"SUPPORT\": self.support_path = module.abs_path elif module.name", "the configure dir that houses this injector file name :", "gu, ad_org, \"ADMythen\", n, n, n)) config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\", gu,", "\"sscan\", y, y, y)) config.add_module(IM(\"IPUNIDIG\", \"R2-11\", \"$(SUPPORT)/ipUnidig\", gu, mod_org, \"ipUnidig\",", "path to install location of EPICS base support_path : str", "name as a key in a dictionary to return reference", "List of InsallModule list of InstallModule objects representing the modules", "name, contents, and target Injector file classes are used to", "rel_path def print_installation_info(self, fp = None): \"\"\"Function that prints installation", "\"master\", \"$(AREA_DETECTOR)/ADVimba\", gu, ad_org, \"ADVimba\", n, n, n)) config.add_module(IM(\"ADPOINTGREY\", \"master\",", ": str path to top level install location path_to_configure :", "objects are used for representing text that need to be", "file to the install_config object Parameters ---------- name : str", "install location path_to_configure : str path to configure folder of", "file into which contents will be injected. \"\"\" new_injector =", "import InstallModule as IM class InstallConfiguration: \"\"\" Class that represents", "modules that are commonly needed config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\", gu, mod_org,", "self.ad_path != None: return installSynApps.join_path(self.ad_path, temp) elif \"$(MOTOR)\" in rel_path", "if not found. 
\"\"\" if name in self.module_map.keys(): return self.modules[self.module_map[name]]", "module's absolute path Parameters ---------- rel_path : str The relative", "Parameters ---------- self : InstallConfiguration Self object Returns ------- bool", "return installSynApps.join_path(self.support_path, temp) elif \"$(AREA_DETECTOR)\" in rel_path and self.ad_path !=", ": list of InjectorFile list of injector files loaded by", "Name of first module module_B : str Name of second", "ad_org, \"ADURL\", n, n, n)) common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str", "= path_to_configure self.name = name self.contents = contents self.target =", "representation for installSynApps. An InstallConfiguration object is parsed from a", "\"$(INSTALL)/base\", gu, base_org, \"epics-base\", y, y, y)) config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\",", "False message = 'Permission Error: {}'.format(target) return valid, message def", "will be injected. 
\"\"\" new_injector = InjectorFile(self.path_to_configure, name, contents, target)", "not os.path.exists(target): valid = False message = 'Install location and", "\"SUPPORT\": self.support_path = module.abs_path elif module.name == \"AREA_DETECTOR\": self.ad_path =", "level install_location, the path to the configuration files, any OS", "is saved at {}\\n\".format(self.path_to_configure) for module in self.modules: if module.clone", "n)) config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\", gu, ad_org, \"ADAravis\", n, n, n))", "used for relative path correction self.base_path = None self.support_path =", "ad_org, \"ADAndor3\", n, n, n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org,", "self.injector_files.append(new_injector) def add_macros(self, macro_list): \"\"\"Function that adds macro-value pairs to", "build order for the module Used for ensuring dependencies are", "y, y, y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\", gu, mod_org, \"autosave\", y,", "clone/build/package information Parameters ---------- fp = None : file pointer", "time. Used to add to commonPlugins, commonPlugin_settings, etc. 
TODO: This", "classes are used to represent data that needs to be", "y, y, y)) config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\", gu, mod_org, \"motor\", y,", "config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu, ad_org, \"ADDexela\", n, n, n)) config.add_module(IM(\"ADMYTHEN\",", "\"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org, \"ADPilatus\", n, n, n)) config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\",", "{}'.format(target) return valid, message def add_module(self, module): \"\"\"Function that adds", "[] # Dict that maps module name to index in", "os.W_OK | os.X_OK): valid = False message = 'Permission Error:", "y)) # Some modules that are commonly needed config.add_module(IM(\"XSPRESS3\", \"master\",", "n, n)) config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org, \"ADPointGrey\", n, n,", "int Dictionary storing relation of module names to build index", "-1 def get_core_version(self): \"\"\"Funciton that returns selected version of ADCore", "---------- self : InstallConfiguration Self object Returns ------- bool True", "version of ADCore \"\"\" return self.get_module_by_name('ADCORE').version def swap_module_positions(self, module_A, module_B):", "if module.name == \"EPICS_BASE\": self.base_path = module.abs_path elif module.name ==", "in rel_path and self.install_location != None: return installSynApps.join_path(self.install_location, temp) elif", "used as a struct anyway) Attributes ---------- path_to_configure : str", "It stores the top level install_location, the path to the", "self.module_map[module.name] = len(self.modules) self.modules.append(module) def add_injector_file(self, name, contents, target): \"\"\"Function", "'file \"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n' if with_pva: autosave_str += 'file \"NDPva_settings.req\",", "target files at build time. 
Used to add to commonPlugins,", "None: return installSynApps.join_path(self.extensions_path, temp) elif \"$(\" in rel_path: macro_part =", "installSynApps.join_path(self.motor_path, temp) elif \"$(EXTENSIONS)\" in rel_path and self.extensions_path != None:", "True message = None target = self.install_location if not os.path.exists(target):", "module in self.modules: if module.clone == 'YES': out = out", "and commonly used drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\", gu, ad_org, \"areaDetector\",", "configuration build_flags : list of list of str list of", "the install_config object Parameters ---------- name : str name of", "temp) elif \"$(SUPPORT)\" in rel_path and self.support_path != None: return", "path to top level install location path_to_configure : str path", "\"\"\"Swaps build order of modules Used to ensure dependencies are", "found, otherwise -1 \"\"\" if name in self.module_map.keys(): return self.module_map[name]", "0, 0, 0)\\n' \\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '# Must", "y, y, y)) config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu, ad_org, \"ADCore\", y,", "gu, ad_org, \"ADMerlin\", n, n, n)) config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\", gu,", "of accounted for modules, replace it with that module's absolute", "None: return installSynApps.join_path(self.base_path, temp) elif \"$(SUPPORT)\" in rel_path and self.support_path", "config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu, ad_org, \"ADAndor\", n, n, n)) config.add_module(IM(\"ADDEXELA\",", "y, y, n)) config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\", gu, syn_org, \"utils\", y,", "start PVA server if this is enabled\\n' \\ 'startPVAServer\\n' \\", "ad_org, \"ADAravis\", n, n, n)) config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu, ad_org,", "to configure 
folder of installSynApps modules : List of InsallModule", "y, n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\", gu, syn_org, \"configure\", y, y,", "name, contents, target): \"\"\"Function that adds a new injector file", "y, y)) # Some modules that are commonly needed config.add_module(IM(\"XSPRESS3\",", "relative path to an absolute path If the macro name", "\"$(SUPPORT)/iocStats\", gu, mod_org, \"iocStats\", y, y, y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\",", "return installSynApps.join_path(self.ad_path, temp) elif \"$(MOTOR)\" in rel_path and self.motor_path !=", "of the file target : str The target location file", "y, y)) config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org, \"ADPerkinElmer\", n, n,", "\"ADDexela\", n, n, n)) config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\", gu, ad_org, \"ADMythen\",", "InjectorFile class \"\"\" self.path_to_configure = path_to_configure self.name = name self.contents", "InstallConfiguration: \"\"\" Class that represents an Install Configuration for installSynApps", "config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\", gu, mod_org, \"calc\", y, y, y)) config.add_module(IM(\"DEVIOCSTATS\",", "paths to track if module.name == \"EPICS_BASE\": self.base_path = module.abs_path", "built before lower level packages Parameters ---------- module_A : str", "!= None: return installSynApps.join_path(self.ad_path, temp) elif \"$(MOTOR)\" in rel_path and", "\"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org, \"ADPointGrey\", n, n, n)) config.add_module(IM(\"ADANDOR\", \"R2-8\",", "list of modules to install in this install configuration \"\"\"", "Attributes ---------- install_location : str path to top level install", "\"ADMythen\", n, n, n)) config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\", gu, ad_org, \"ADURL\",", "gu, ad_org, \"ADCore\", y, y, y)) 
config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu,", "that represents an injector file and stores its name, contents,", "0, 0)\\n' \\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '# Must start", "list of modules that will be installed. Attributes ---------- install_location", "Module name Returns ------- int Index of module in build", "= rel_path.split(')')[0] rel_to = macro_part.split('(')[1] rel_to_module = self.get_module_by_name(rel_to) if rel_to_module", "relative path correction self.base_path = None self.support_path = None self.ad_path", "y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\", gu, mod_org, \"iocStats\", y, y, y))", "\"ADAndor\", n, n, n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu, ad_org, \"ADDexela\",", "returns install module object given module name Uses module name", "in build order if found, otherwise -1 \"\"\" if name", "AreaDetector and commonly used drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\", gu, ad_org,", "not None: return installSynApps.join_path(rel_to_module.abs_path, temp) return rel_path def print_installation_info(self, fp", "def print_installation_info(self, fp = None): \"\"\"Function that prints installation info", "gets list of modules being built Returns ------- list of", "the list of modules in the configuration Returns ------- List", "common_plugins_str += 'NDPvaConfigure(\"PVA1\", $(QSIZE), 0, \"$(PORT)\", 0, $(PREFIX)Pva1:Image, 0, 0,", "level packages Parameters ---------- module_A : str Name of first", "specific configurations, and the actual list of modules that will", "applicable, None otherwise \"\"\" valid = True message = None", "install location paths used for relative path correction self.base_path =", "and self.motor_path != None: return installSynApps.join_path(self.motor_path, temp) elif 
\"$(EXTENSIONS)\" in", "installation info Prints list of all modules including clone/build/package information", "list of macros Parameters ---------- macro_list : list of [str,", "to install location of EPICS base support_path : str abs", "the configuration files, any OS specific configurations, and the actual", "\"ADPerkinElmer\", n, n, n)) config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org, \"ADGenICam\",", "configuration \"\"\" return self.modules def get_module_by_name(self, name): \"\"\"Function that returns", "are built before lower level packages. Parameters ---------- name :", "\"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org, \"ADPerkinElmer\", n, n, n)) config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\",", "time \"\"\" def __init__(self, install_location, path_to_configure): \"\"\"Constructor for the InstallConfiguration", "\"$(SUPPORT)/configure\", gu, syn_org, \"configure\", y, y, n)) config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\",", "\"ADProsilica\", n, n, n)) config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org, \"ADSimDetector\",", "return -1 def get_core_version(self): \"\"\"Funciton that returns selected version of", "Install Configuration for installSynApps It stores the top level install_location,", "dependencies are built before lower level packages. 
Parameters ---------- name", "gu, ad_org, \"areaDetector\", y, y, n)) config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu,", "with that module's absolute path Parameters ---------- rel_path : str", "enabled\\n' \\ 'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req')", "macro_part = rel_path.split(')')[0] rel_to = macro_part.split('(')[1] rel_to_module = self.get_module_by_name(rel_to) if", "installed base_path : str abs path to install location of", "added. \"\"\" if isinstance(module, IM): # Updates the abs path", "Returns ------- List self.modules - list of modules to install", "y, y, y)) config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\", gu, mod_org, \"ipac\", y,", ">= 0 and index_B >= 0: temp_A = self.get_module_by_name(module_B) temp_B", "location of EPICS area detector motor_path : str abs path", "that adds macro-value pairs to a list of macros Parameters", "if name in self.module_map.keys(): return self.module_map[name] else: return -1 def", "\"\"\"Constructor for the InstallConfiguration object \"\"\" # Paths to configure", "one of the three key modules to track, sets the", "and parent directory do not exist' elif not os.access(target, os.W_OK", "y, y)) config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\", gu, mod_org, \"ipac\", y, y,", "InjectorFile(self.path_to_configure, name, contents, target) self.injector_files.append(new_injector) def add_macros(self, macro_list): \"\"\"Function that", "installSynApps modules : List of InsallModule list of InstallModule objects", "\"calc\", y, y, y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\", gu, mod_org, \"iocStats\",", "\"master\", \"$(AREA_DETECTOR)/ADURL\", gu, ad_org, \"ADURL\", n, n, n)) common_plugins_str =", 
"\"NDPva_settings.req\", P=$(P), R=Pva1:\\n' common_plugins_str += 'NDPvaConfigure(\"PVA1\", $(QSIZE), 0, \"$(PORT)\", 0,", "[] self.build_flags = [] # Paths to the three install", "struct anyway) Attributes ---------- path_to_configure : str path to the", "PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '# Must start PVA server if this is", "syn_org = 'https://github.com/EPICS-synApps/' mod_org = 'https://github.com/epics-modules/' ad_org = 'https://github.com/areaDetector/' seq_rel", "OS specific configurations, and the actual list of modules that", "name : str Module name Returns ------- int Index of", "to the InstallConfiguration module list First checks if parameter is", "\"$(SUPPORT)/stream\", gu, psi_org, \"StreamDevice\", y, y, y)) # AreaDetector and", "this is enabled\\n' \\ 'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG',", "to target files at build time. Used to add to", "gu, mod_org, \"iocStats\", y, y, y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\", gu,", "in module list for easier searching. self.module_map = {} self.injector_files", "contents will be injected. \"\"\" def __init__(self, path_to_configure, name, contents,", "{} self.injector_files = [] self.build_flags = [] # Paths to", "module. 
(Macros are replaced) \"\"\" temp = rel_path.split('/', 1)[-1] if", "to represent data that needs to be appended to target", "Parameters ---------- module : InstallModule new installation module being added.", "str list of macro-value pairs enforced at build time \"\"\"", "= out + \"Install Location = {}\\n\".format(self.install_location) out = out", "name Returns ------- int Index of module in build order", "enforced at build time \"\"\" def __init__(self, install_location, path_to_configure): \"\"\"Constructor", "\"\"\"Function that adds a new injector file to the install_config", "\"ADMerlin\", n, n, n)) config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\", gu, ad_org, \"ADAravis\",", "temp) elif \"$(EPICS_BASE)\" in rel_path and self.base_path != None: return", "object is parsed from a configuration, and is then used", "\"\"\" self.build_flags = self.build_flags + macro_list def get_module_list(self): \"\"\"Function that", "detector motor_path : str abs path to install location of", "if applicable, None otherwise \"\"\" valid = True message =", "return valid, message def add_module(self, module): \"\"\"Function that adds a", ": str abs path to install location of EPICS support", "module module_B : str Name of second module \"\"\" index_A", "top level install location path_to_configure : str path to configure", "\"\"\" index_A = self.get_module_build_index(module_A) index_B = self.get_module_build_index(module_B) if index_A >=", "in rel_path and self.motor_path != None: return installSynApps.join_path(self.motor_path, temp) elif", "None self.extensions_path = None def is_install_valid(self): \"\"\"Function that checks if", "configuration, and is then used throughout the build process. 
InjectorFile", "name, contents, target): \"\"\"Constructor of InjectorFile class \"\"\" self.path_to_configure =", "config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org, \"ADGenICam\", n, n, n)) config.add_module(IM(\"ADANDOR3\",", "common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req') if update_versions: installSynApps.sync_all_module_tags(config) return config", "generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True): config = InstallConfiguration(target_install_loc, None) y = 'YES'", "gu, mod_org, \"quadEM\", y, y, y)) config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\", gu,", "'https://github.com/epics-modules/' ad_org = 'https://github.com/areaDetector/' seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org = 'https://github.com/paulscherrerinstitute/'", "y, y, y)) # Some modules that are commonly needed", ": list of list of str list of macro-value pairs", "installSynApps from installSynApps.data_model.install_module import InstallModule as IM class InstallConfiguration: \"\"\"", "str Module name Returns ------- obj - InstallModule Return matching", "of [str, str] list of new macros to append \"\"\"", "ad_org, \"ADSimDetector\", n, n, n)) config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org,", "str Name of first module module_B : str Name of", "config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\", gu, syn_org, \"support\", y, y, n)) config.add_module(IM(\"CONFIGURE\",", "support_path : str abs path to install location of EPICS", "to add to commonPlugins, commonPlugin_settings, etc. 
TODO: This class can", "rel_path and self.motor_path != None: return installSynApps.join_path(self.motor_path, temp) elif \"$(EXTENSIONS)\"", "modules ad_path : str abs path to install location of", "InstallModule new installation module being added. \"\"\" if isinstance(module, IM):", "y)) config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\", gu, mod_org, \"asyn\", y, y, y))", "y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\", gu, mod_org, \"sscan\", y, y, y))", "elif module.name == \"AREA_DETECTOR\": self.ad_path = module.abs_path elif module.name ==", "Self object Returns ------- bool True if install location is", "config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org, \"ADPerkinElmer\", n, n, n)) config.add_module(IM(\"ADGENICAM\",", "module list for easier searching. self.module_map = {} self.injector_files =", "\"\"\"Class that represents an injector file and stores its name,", ": str Name of first module module_B : str Name", "temp) elif \"$(EXTENSIONS)\" in rel_path and self.extensions_path != None: return", "pointer Optional pointer to an external log file \"\"\" if", "n)) config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\", gu, ad_org, \"ADVimba\", n, n, n))", "then sets the config, and abs path, then if it", "module.name == \"AREA_DETECTOR\": self.ad_path = module.abs_path elif module.name == \"MOTOR\":", "a list of macros Parameters ---------- macro_list : list of", "modules that will be installed. Attributes ---------- install_location : str", "location and parent directory do not exist' elif not os.access(target,", "configurations. The core Data representation for installSynApps. 
An InstallConfiguration object", "a key in a dictionary to return reference to given", "len(self.modules) self.modules.append(module) def add_injector_file(self, name, contents, target): \"\"\"Function that adds", "will generally always be built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\", gu, base_org,", "location file into which contents will be injected. \"\"\" def", "\"\"\" Class that represents an Install Configuration for installSynApps It", "None: return installSynApps.join_path(self.ad_path, temp) elif \"$(MOTOR)\" in rel_path and self.motor_path", "module name Uses module name as a key in a", "self.install_location if not os.path.exists(target): target = os.path.dirname(self.install_location) if not os.path.exists(target):", "out = out + \"This Install Config is saved at", "y, y)) config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\", gu, psi_org, \"StreamDevice\", y, y,", "macros to append \"\"\" self.build_flags = self.build_flags + macro_list def", "- InstallModule Return matching module, or None if not found.", "= 'GIT_URL' wu = 'WGET_URL' base_org = 'https://github.com/epics-base/' syn_org =", "file contents : str The contents of the file target", "out + module.get_printable_string() return out def get_module_names_list(self): \"\"\"Function that gets", "packages Parameters ---------- module_A : str Name of first module", "y)) config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\", gu, mod_org, \"motor\", y, y, y))", "the module to the map of modules which will keep", "modules, replace it with that module's absolute path Parameters ----------", "\"\"\" # Paths to configure and output locations self.path_to_configure =", "otherwise -1 \"\"\" if name in self.module_map.keys(): return self.module_map[name] else:", "Returns ------- int Index of module in build order if", "= index_B self.module_map[module_B] = index_A def convert_path_abs(self, rel_path): \"\"\"Function that", "gu, ad_org, \"ADDexela\", n, n, 
n)) config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\", gu,", "y, y)) # AreaDetector and commonly used drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\",", "\"EPICS_BASE\": self.base_path = module.abs_path elif module.name == \"SUPPORT\": self.support_path =", "this install configuration \"\"\" return self.modules def get_module_by_name(self, name): \"\"\"Function", "\"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org, \"ADPointGrey\", n, n, n)) config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\",", "Returns ------- str A string representing the install configuration \"\"\"", "InstallConfigurations Returns ------- str A string representing the install configuration", "Install Config is saved at {}\\n\".format(self.path_to_configure) for module in self.modules:", "'YES' n = 'NO' gu = 'GIT_URL' wu = 'WGET_URL'", "'# Must start PVA server if this is enabled\\n' \\", "dictionary to return reference to given module object. Parameters ----------", "self.modules: if module.clone == 'YES': out = out + module.get_printable_string()", "n = 'NO' gu = 'GIT_URL' wu = 'WGET_URL' base_org", "InstallConfiguration object \"\"\" # Paths to configure and output locations", "def __init__(self, install_location, path_to_configure): \"\"\"Constructor for the InstallConfiguration object \"\"\"", "of install configurations. The core Data representation for installSynApps. An", "selected version of ADCore \"\"\" return self.get_module_by_name('ADCORE').version def swap_module_positions(self, module_A,", "commonly used drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\", gu, ad_org, \"areaDetector\", y,", "representations of install configurations. The core Data representation for installSynApps.", "built before lower level packages. 
Parameters ---------- name : str", "it with that module's absolute path Parameters ---------- rel_path :", "install configuration build_flags : list of list of str list", "an InstallConfigurations Returns ------- str A string representing the install", "self.path_to_configure = path_to_configure self.name = name self.contents = contents self.target", "of second module \"\"\" index_A = self.get_module_build_index(module_A) index_B = self.get_module_build_index(module_B)", "else: return None def get_module_build_index(self, name): \"\"\"Function that returns the", "are set to build \"\"\" out = [] for module", "external log file \"\"\" if fp == None: print(self.get_printable_string().strip()) else:", "modules being built Returns ------- list of str list of", "all modules including clone/build/package information Parameters ---------- fp = None", "to builds. \"\"\" import os import installSynApps from installSynApps.data_model.install_module import", "injector files loaded by install configuration build_flags : list of", "\"$(AREA_DETECTOR)/ADURL\", gu, ad_org, \"ADURL\", n, n, n)) common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\",", "\"iocStats\", y, y, y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\", gu, mod_org, \"sscan\",", "\"$(AREA_DETECTOR)/ADSupport\", gu, ad_org, \"ADSupport\", y, y, y)) config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\",", "S=AcquireSequence\\n' if with_pva: autosave_str += 'file \"NDPva_settings.req\", P=$(P), R=Pva1:\\n' common_plugins_str", "\"--------------------------------\\n\" out = out + \"Install Location = {}\\n\".format(self.install_location) out", "None self.motor_path = None self.extensions_path = None def is_install_valid(self): \"\"\"Function", "build_flags : list of list of str list of macro-value", "EPICS area detector motor_path : str abs path to install", "\"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org, \"ADMerlin\", n, n, n)) 
config.add_module(IM(\"ADARAVIS\", \"master\",", "None): \"\"\"Function that prints installation info Prints list of all", "to top level install location path_to_configure : str path to", "for module in self.modules: if module.build == 'YES': out.append(module.name) return", "install configuration \"\"\" out = \"--------------------------------\\n\" out = out +", "!= None: return installSynApps.join_path(self.extensions_path, temp) elif \"$(\" in rel_path: macro_part", "fp == None: print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string()) def get_printable_string(self): \"\"\"Function that", "\"$(AREA_DETECTOR)/ADVimba\", gu, ad_org, \"ADVimba\", n, n, n)) config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\",", "target): \"\"\"Function that adds a new injector file to the", "TODO: This class can probably be abstracted into a simpler", "and output locations self.path_to_configure = path_to_configure self.install_location = os.path.abspath(install_location) #", "and index_B >= 0: temp_A = self.get_module_by_name(module_B) temp_B = self.get_module_by_name(module_A)", "adds macro-value pairs to a list of macros Parameters ----------", "# AreaDetector and commonly used drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\", gu,", "that will be installed. 
Attributes ---------- install_location : str path", "base_org = 'https://github.com/epics-base/' syn_org = 'https://github.com/EPICS-synApps/' mod_org = 'https://github.com/epics-modules/' ad_org", "a struct anyway) Attributes ---------- path_to_configure : str path to", "Also, add the module to the map of modules which", "return out def get_module_names_list(self): \"\"\"Function that gets list of modules", "module.name == \"SUPPORT\": self.support_path = module.abs_path elif module.name == \"AREA_DETECTOR\":", "to the map of modules which will keep track of", "def get_core_version(self): \"\"\"Funciton that returns selected version of ADCore \"\"\"", "\"\"\" if name in self.module_map.keys(): return self.module_map[name] else: return -1", "ad_org, \"ADPilatus\", n, n, n)) config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org,", "\"master\", \"$(AREA_DETECTOR)/ADMythen\", gu, ad_org, \"ADMythen\", n, n, n)) config.add_module(IM(\"ADURL\", \"master\",", "commonly needed config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\", gu, mod_org, \"xspress3\", y, y,", "\"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org, \"ADGenICam\", n, n, n)) config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\",", "n)) config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org, \"ADSimDetector\", n, n, n))", "houses this injector file name : str name of the", "message def add_module(self, module): \"\"\"Function that adds a module to", "config, and abs path, then if it is one of", "InstallConfiguration Self object Returns ------- bool True if install location", "file classes are used to represent data that needs to", "def swap_module_positions(self, module_A, module_B): \"\"\"Swaps build order of modules Used", "gu, ad_org, \"ADSupport\", y, y, y)) config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu,", "of modules which will keep track of which position each", 
"= 'Permission Error: {}'.format(target) return valid, message def add_module(self, module):", "\"R7.0.3\", \"$(INSTALL)/base\", gu, base_org, \"epics-base\", y, y, y)) config.add_module(IM(\"SUPPORT\", \"R6-1\",", "list for easier searching. self.module_map = {} self.injector_files = []", "\"StreamDevice\", y, y, y)) # AreaDetector and commonly used drivers", "object Parameters ---------- name : str name of the file", "gu, ad_org, \"ADProsilica\", n, n, n)) config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu,", "the macro name can be found in the list of", "self.path_to_configure = path_to_configure self.install_location = os.path.abspath(install_location) # Modules loaded into", "os.path.exists(target): valid = False message = 'Install location and parent", "absolute path Parameters ---------- rel_path : str The relative installation", "\"\"\"Function that returns the index in the build order for", "\"$(INSTALL)\" in rel_path and self.install_location != None: return installSynApps.join_path(self.install_location, temp)", "None self.support_path = None self.ad_path = None self.motor_path = None", "self.modules - list of modules to install in this install", "InjectorFile: \"\"\"Class that represents an injector file and stores its", "ad_path : str abs path to install location of EPICS", "= None : file pointer Optional pointer to an external", "\"\"\"Function that checks if given install location is valid Parameters", "ensure dependencies are built before lower level packages Parameters ----------", "{}\\n\".format(self.install_location) out = out + \"This Install Config is saved", "class can probably be abstracted into a simpler data structure", "\"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org, \"ADGenICam\", n, n, n)) config.add_module(IM(\"ADANDOR3\", \"master\",", "= self.get_module_by_name(rel_to) if rel_to_module is not None: return installSynApps.join_path(rel_to_module.abs_path, temp)", "y = 'YES' n = 'NO' gu = 
'GIT_URL' wu", "for easier searching. self.module_map = {} self.injector_files = [] self.build_flags", "\"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu, ad_org, \"ADEiger\", n, n, n)) config.add_module(IM(\"ADVIMBA\", \"master\",", "\"2.2.8\", \"$(SUPPORT)/seq\", wu, seq_rel, \"seq-2.2.8.tar.gz\", y, y, y)) config.add_module(IM(\"IPAC\", \"2.15\",", ": str The contents of the file target : str", "\"ADEiger\", n, n, n)) config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\", gu, ad_org, \"ADVimba\",", "install location of EPICS motor module_map : dict of str", "\"\"\" return self.modules def get_module_by_name(self, name): \"\"\"Function that returns install", "= InjectorFile(self.path_to_configure, name, contents, target) self.injector_files.append(new_injector) def add_macros(self, macro_list): \"\"\"Function", "of all modules including clone/build/package information Parameters ---------- fp =", "\"$(SUPPORT)/sscan\", gu, mod_org, \"sscan\", y, y, y)) config.add_module(IM(\"IPUNIDIG\", \"R2-11\", \"$(SUPPORT)/ipUnidig\",", "\"\"\" def __init__(self, path_to_configure, name, contents, target): \"\"\"Constructor of InjectorFile", "rel_path): \"\"\"Function that converts a given modules relative path to", "installed. Attributes ---------- install_location : str path to top level", "elif \"$(MOTOR)\" in rel_path and self.motor_path != None: return installSynApps.join_path(self.motor_path,", "for the InstallConfiguration object \"\"\" # Paths to configure and", "prior to builds. 
\"\"\" import os import installSynApps from installSynApps.data_model.install_module", "module.build == 'YES': out.append(module.name) return out class InjectorFile: \"\"\"Class that", "self.motor_path != None: return installSynApps.join_path(self.motor_path, temp) elif \"$(EXTENSIONS)\" in rel_path", "fp.write(self.get_printable_string()) def get_printable_string(self): \"\"\"Function that gets a toString for an", "= 'https://github.com/EPICS-synApps/' mod_org = 'https://github.com/epics-modules/' ad_org = 'https://github.com/areaDetector/' seq_rel =", "def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True): config = InstallConfiguration(target_install_loc, None) y =", "absolute path If the macro name can be found in", "can probably be abstracted into a simpler data structure (since", "self.get_module_by_name(rel_to) if rel_to_module is not None: return installSynApps.join_path(rel_to_module.abs_path, temp) return", "new injector file to the install_config object Parameters ---------- name", "list of InstallModule objects representing the modules that will be", "Dictionary storing relation of module names to build index injector_files", "storing relation of module names to build index injector_files :", "in the list of accounted for modules, replace it with", "\"$(SUPPORT)/seq\", wu, seq_rel, \"seq-2.2.8.tar.gz\", y, y, y)) config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\",", "str abs path to install location of EPICS motor module_map", "= os.path.abspath(install_location) # Modules loaded into install config self.modules =", "are commonly needed config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\", gu, mod_org, \"xspress3\", y,", "rel_path.split('/', 1)[-1] if \"$(INSTALL)\" in rel_path and self.install_location != None:", "self.modules: if module.build == 'YES': out.append(module.name) return out class InjectorFile:", "gu, mod_org, \"sscan\", y, y, y)) config.add_module(IM(\"IPUNIDIG\", 
\"R2-11\", \"$(SUPPORT)/ipUnidig\", gu,", "\"\"\"Function that gets a toString for an InstallConfigurations Returns -------", "install location of EPICS support modules ad_path : str abs", "a valid InstallModule, then sets the config, and abs path,", "a toString for an InstallConfigurations Returns ------- str A string", "module, or None if not found. \"\"\" if name in", "---------- name : str Module name Returns ------- int Index", "= index_A def convert_path_abs(self, rel_path): \"\"\"Function that converts a given", "= None self.ad_path = None self.motor_path = None self.extensions_path =", "\"R4-37\", \"$(SUPPORT)/asyn\", gu, mod_org, \"asyn\", y, y, y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\",", "object \"\"\" # Paths to configure and output locations self.path_to_configure", "self.name = name self.contents = contents self.target = target def", "order Parameters ---------- module : InstallModule new installation module being", "the list of accounted for modules, replace it with that", "elif \"$(EPICS_BASE)\" in rel_path and self.base_path != None: return installSynApps.join_path(self.base_path,", "drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\", gu, ad_org, \"areaDetector\", y, y, n))", "\"R6-1\", \"$(SUPPORT)/configure\", gu, syn_org, \"configure\", y, y, n)) config.add_module(IM(\"UTILS\", \"R6-1\",", "built Returns ------- list of str list of module names", "is in in the list/build order Parameters ---------- module :", "modules that will be installed base_path : str abs path", "installSynApps.data_model.install_module import InstallModule as IM class InstallConfiguration: \"\"\" Class that", "path module.abs_path = self.convert_path_abs(module.rel_path) # Key paths to track if", "Optional pointer to an external log file \"\"\" if fp", "toString for an InstallConfigurations Returns ------- str A string representing", "else: return -1 def get_core_version(self): \"\"\"Funciton that returns selected version", 
"InstallModule objects representing the modules that will be installed base_path", "InjectorFile list of injector files loaded by install configuration build_flags", "name, contents, target) self.injector_files.append(new_injector) def add_macros(self, macro_list): \"\"\"Function that adds", "str Module name Returns ------- int Index of module in", "install module object given module name Uses module name as", "gu, ad_org, \"ADURL\", n, n, n)) common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n'", "config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org, \"ADProsilica\", n, n, n)) config.add_module(IM(\"ADSIMDETECTOR\",", "= temp_A self.modules[index_B] = temp_B self.module_map[module_A] = index_B self.module_map[module_B] =", "elif module.name == \"SUPPORT\": self.support_path = module.abs_path elif module.name ==", "path_to_configure self.name = name self.contents = contents self.target = target", "of EPICS support modules ad_path : str abs path to", "for an InstallConfigurations Returns ------- str A string representing the", "this injector file name : str name of the file", "actual list of modules that will be installed. 
Attributes ----------", "configuration \"\"\" out = \"--------------------------------\\n\" out = out + \"Install", "Key paths to track if module.name == \"EPICS_BASE\": self.base_path =", "\"\"\" return self.get_module_by_name('ADCORE').version def swap_module_positions(self, module_A, module_B): \"\"\"Swaps build order", "to install location of EPICS support modules ad_path : str", "adds a module to the InstallConfiguration module list First checks", "then if it is one of the three key modules", "abstracted into a simpler data structure (since its used as", "configure dir that houses this injector file name : str", "n, n, n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu, ad_org, \"ADDexela\", n,", "path_to_configure : str path to the configure dir that houses", "\"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org, \"ADProsilica\", n, n, n)) config.add_module(IM(\"ADSIMDETECTOR\", \"master\",", "path, then if it is one of the three key", "(Macros are replaced) \"\"\" temp = rel_path.split('/', 1)[-1] if \"$(INSTALL)\"", "'WGET_URL' base_org = 'https://github.com/epics-base/' syn_org = 'https://github.com/EPICS-synApps/' mod_org = 'https://github.com/epics-modules/'", "list of str list of module names that are set", "Configuration for installSynApps It stores the top level install_location, the", "and self.base_path != None: return installSynApps.join_path(self.base_path, temp) elif \"$(SUPPORT)\" in", "Paths to configure and output locations self.path_to_configure = path_to_configure self.install_location", "server if this is enabled\\n' \\ 'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str,", "__init__(self, install_location, path_to_configure): \"\"\"Constructor for the InstallConfiguration object \"\"\" #", ": str abs path to install location of EPICS motor", "three install location paths used for relative path correction self.base_path", "self.build_flags + macro_list def 
get_module_list(self): \"\"\"Function that gets the list", "EPICS support modules ad_path : str abs path to install", "location is valid Parameters ---------- self : InstallConfiguration Self object", "\"$(AREA_DETECTOR)/ADAravis\", gu, ad_org, \"ADAravis\", n, n, n)) config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\",", "etc. TODO: This class can probably be abstracted into a", "abs path to install location of EPICS motor module_map :", "install_location : str path to top level install location path_to_configure", "module \"\"\" index_A = self.get_module_build_index(module_A) index_B = self.get_module_build_index(module_B) if index_A", "\"$(MOTOR)\" in rel_path and self.motor_path != None: return installSynApps.join_path(self.motor_path, temp)", "Class that represents an Install Configuration for installSynApps It stores", ": str abs path to install location of EPICS base", "config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\", wu, seq_rel, \"seq-2.2.8.tar.gz\", y, y, y)) config.add_module(IM(\"IPAC\",", "y, y, y)) config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\", gu, mod_org, \"busy\", y,", "\"$(AREA_DETECTOR)/ADCore\", gu, ad_org, \"ADCore\", y, y, y)) config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\",", "Parameters ---------- macro_list : list of [str, str] list of", "injector file name : str name of the file contents", ": str The relative installation path for the given module", "\"$(SUPPORT)/ipUnidig\", gu, mod_org, \"ipUnidig\", y, y, y)) # Some modules", "that returns install module object given module name Uses module", "be built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\", gu, base_org, \"epics-base\", y, y,", "'GIT_URL' wu = 'WGET_URL' base_org = 'https://github.com/epics-base/' syn_org = 'https://github.com/EPICS-synApps/'", "the actual list of modules that will be installed. 
Attributes", "self.modules[index_B] = temp_B self.module_map[module_A] = index_B self.module_map[module_B] = index_A def", "n)) config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org, \"ADAndor3\", n, n, n))", "\"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org, \"ADSimDetector\", n, n, n)) config.add_module(IM(\"ADPILATUS\", \"R2-8\",", "n)) common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str = 'file \"sseqRecord_settings.req\", P=$(P),", "of EPICS area detector motor_path : str abs path to", "InstallModule as IM class InstallConfiguration: \"\"\" Class that represents an", "process. InjectorFile objects are used for representing text that need", "List self.modules - list of modules to install in this", "location of EPICS motor module_map : dict of str ->", "[] for module in self.modules: if module.build == 'YES': out.append(module.name)", "that are commonly needed config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\", gu, mod_org, \"xspress3\",", ": str abs path to install location of EPICS area", "as a struct anyway) Attributes ---------- path_to_configure : str path", "if name in self.module_map.keys(): return self.modules[self.module_map[name]] else: return None def", "an absolute path If the macro name can be found", "n)) config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\", gu, ad_org, \"ADURL\", n, n, n))", "syn_org, \"support\", y, y, n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\", gu, syn_org,", "installSynApps.join_path(self.extensions_path, temp) elif \"$(\" in rel_path: macro_part = rel_path.split(')')[0] rel_to", "return rel_path def print_installation_info(self, fp = None): \"\"\"Function that prints", "config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\", gu, mod_org, \"quadEM\", y, y, y)) config.add_module(IM(\"STREAM\",", "\"ADVimba\", n, n, n)) 
config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org, \"ADPointGrey\",", "\"R9-3\", \"$(SUPPORT)/quadEM\", gu, mod_org, \"quadEM\", y, y, y)) config.add_module(IM(\"STREAM\", \"2.8.10\",", "An InstallConfiguration object is parsed from a configuration, and is", "-1 \"\"\" if name in self.module_map.keys(): return self.module_map[name] else: return", "= module.abs_path elif module.name == \"EXTENSIONS\": self.extensions_path = module.abs_path self.module_map[module.name]", ": str path to the configure dir that houses this", "y, y, y)) config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\", gu, syn_org, \"support\", y,", "list First checks if parameter is a valid InstallModule, then", "\"$(AREA_DETECTOR)/ADMythen\", gu, ad_org, \"ADMythen\", n, n, n)) config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\",", "\"R2-11-3\", \"$(SUPPORT)/sscan\", gu, mod_org, \"sscan\", y, y, y)) config.add_module(IM(\"IPUNIDIG\", \"R2-11\",", "\"ADAravis\", n, n, n)) config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu, ad_org, \"ADEiger\",", "None: return installSynApps.join_path(rel_to_module.abs_path, temp) return rel_path def print_installation_info(self, fp =", "modules that will generally always be built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\",", "the index in the build order for the module Used", "ad_org, \"ADAndor\", n, n, n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu, ad_org,", "= temp_B self.module_map[module_A] = index_B self.module_map[module_B] = index_A def convert_path_abs(self,", "structure (since its used as a struct anyway) Attributes ----------", "isinstance(module, IM): # Updates the abs path module.abs_path = self.convert_path_abs(module.rel_path)", "list of modules being built Returns ------- list of str", "it is one of the three key modules to track,", "gu, mod_org, \"motor\", y, y, y)) 
config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\", gu,", "list of injector files loaded by install configuration build_flags :", "Some modules that are commonly needed config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\", gu,", "Must start PVA server if this is enabled\\n' \\ 'startPVAServer\\n'", "'NO' gu = 'GIT_URL' wu = 'WGET_URL' base_org = 'https://github.com/epics-base/'", "n, n)) config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu, ad_org, \"ADEiger\", n, n,", "name): \"\"\"Function that returns install module object given module name", "contents : str The contents of the file target :", "ad_org, \"ADCore\", y, y, y)) config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org,", "gu = 'GIT_URL' wu = 'WGET_URL' base_org = 'https://github.com/epics-base/' syn_org", "= module.abs_path elif module.name == \"AREA_DETECTOR\": self.ad_path = module.abs_path elif", "\"\"\"Function that converts a given modules relative path to an", "can be found in the list of accounted for modules,", "If the macro name can be found in the list", "base_org, \"epics-base\", y, y, y)) config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\", gu, syn_org,", "is one of the three key modules to track, sets", "is_install_valid(self): \"\"\"Function that checks if given install location is valid", "in the list/build order Parameters ---------- module : InstallModule new", "order if found, otherwise -1 \"\"\" if name in self.module_map.keys():", "= target def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True): config = InstallConfiguration(target_install_loc, None)", "installSynApps. An InstallConfiguration object is parsed from a configuration, and", "Updates the abs path module.abs_path = self.convert_path_abs(module.rel_path) # Key paths", "injected. 
\"\"\" def __init__(self, path_to_configure, name, contents, target): \"\"\"Constructor of", "level install location path_to_configure : str path to configure folder", "and self.support_path != None: return installSynApps.join_path(self.support_path, temp) elif \"$(AREA_DETECTOR)\" in", "n, n)) config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\", gu, ad_org, \"ADURL\", n, n,", "list of [str, str] list of new macros to append", "for the module. (Macros are replaced) \"\"\" temp = rel_path.split('/',", "file pointer Optional pointer to an external log file \"\"\"", "contents will be injected. \"\"\" new_injector = InjectorFile(self.path_to_configure, name, contents,", "ad_org, \"ADDexela\", n, n, n)) config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\", gu, ad_org,", "need to be injected into configuration files prior to builds.", "\"\"\" if isinstance(module, IM): # Updates the abs path module.abs_path", "of modules that will be installed. Attributes ---------- install_location :", "containing representations of install configurations. 
The core Data representation for", "objects representing the modules that will be installed base_path :", "n)) config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org, \"ADGenICam\", n, n, n))", "n, n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu, ad_org, \"ADDexela\", n, n,", "of modules in the configuration Returns ------- List self.modules -", "Injector file classes are used to represent data that needs", "abs path to install location of EPICS support modules ad_path", "of new macros to append \"\"\" self.build_flags = self.build_flags +", "Uses module name as a key in a dictionary to", "a module to the InstallConfiguration module list First checks if", "an Install Configuration for installSynApps It stores the top level", "elif \"$(SUPPORT)\" in rel_path and self.support_path != None: return installSynApps.join_path(self.support_path,", "\"ADSimDetector\", n, n, n)) config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org, \"ADPilatus\",", "gets the list of modules in the configuration Returns -------", "ad_org, \"ADProsilica\", n, n, n)) config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org,", "temp_B self.module_map[module_A] = index_B self.module_map[module_B] = index_A def convert_path_abs(self, rel_path):", "Module name Returns ------- obj - InstallModule Return matching module,", "will be installed base_path : str abs path to install", "position each module is in in the list/build order Parameters", "module.abs_path self.module_map[module.name] = len(self.modules) self.modules.append(module) def add_injector_file(self, name, contents, target):", "None self.ad_path = None self.motor_path = None self.extensions_path = None", "out + \"Install Location = {}\\n\".format(self.install_location) out = out +", "if \"$(INSTALL)\" in rel_path and self.install_location != None: return 
installSynApps.join_path(self.install_location,", "- list of modules to install in this install configuration", "\"\"\"Funciton that returns selected version of ADCore \"\"\" return self.get_module_by_name('ADCORE').version", "at build time \"\"\" def __init__(self, install_location, path_to_configure): \"\"\"Constructor for", "config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\", gu, mod_org, \"busy\", y, y, y)) config.add_module(IM(\"CALC\",", "support modules ad_path : str abs path to install location", "pairs enforced at build time \"\"\" def __init__(self, install_location, path_to_configure):", "is valid, false otherwise str Error message if applicable, None", "given module name Uses module name as a key in", "ad_org, \"ADGenICam\", n, n, n)) config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org,", "y)) config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\", gu, mod_org, \"busy\", y, y, y))", "y)) config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\", gu, syn_org, \"support\", y, y, n))", "+= 'NDPvaConfigure(\"PVA1\", $(QSIZE), 0, \"$(PORT)\", 0, $(PREFIX)Pva1:Image, 0, 0, 0)\\n'", "of the file contents : str The contents of the", "dir that houses this injector file name : str name", "self.build_flags = self.build_flags + macro_list def get_module_list(self): \"\"\"Function that gets", "configuration files prior to builds. \"\"\" import os import installSynApps", "get_core_version(self): \"\"\"Funciton that returns selected version of ADCore \"\"\" return", "index in the build order for the module Used for", "if given install location is valid Parameters ---------- self :", "module names that are set to build \"\"\" out =", "module_B : str Name of second module \"\"\" index_A =", "track, sets the appropriate variables. Also, add the module to", "path for the module. 
(Macros are replaced) \"\"\" temp =", "common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str = 'file \"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n'", "n, n)) config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu, ad_org, \"ADAndor\", n, n,", "given module Returns ------- str The absolute installation path for", "+ module.get_printable_string() return out def get_module_names_list(self): \"\"\"Function that gets list", "\"$(SUPPORT)/areaDetector\", gu, ad_org, \"areaDetector\", y, y, n)) config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\",", "is parsed from a configuration, and is then used throughout", "str name of the file contents : str The contents", "\"R1-7-2\", \"$(SUPPORT)/busy\", gu, mod_org, \"busy\", y, y, y)) config.add_module(IM(\"CALC\", \"R3-7-3\",", "str] list of new macros to append \"\"\" self.build_flags =", "config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\", gu, ad_org, \"ADVimba\", n, n, n)) config.add_module(IM(\"ADPOINTGREY\",", ": str name of the file contents : str The", "config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org, \"ADPointGrey\", n, n, n)) config.add_module(IM(\"ADANDOR\",", ": file pointer Optional pointer to an external log file", "that returns selected version of ADCore \"\"\" return self.get_module_by_name('ADCORE').version def", "the path to the configuration files, any OS specific configurations,", "target location file into which contents will be injected. \"\"\"", "throughout the build process. 
InjectorFile objects are used for representing", "'file \"NDPva_settings.req\", P=$(P), R=Pva1:\\n' common_plugins_str += 'NDPvaConfigure(\"PVA1\", $(QSIZE), 0, \"$(PORT)\",", "\"$(SUPPORT)/xspress3\", gu, mod_org, \"xspress3\", y, y, y)) config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\",", "y, y, y)) config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\", gu, mod_org, \"asyn\", y,", "of injector files loaded by install configuration build_flags : list", "Used to ensure dependencies are built before lower level packages", "== None: print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string()) def get_printable_string(self): \"\"\"Function that gets", "ad_org, \"ADEiger\", n, n, n)) config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\", gu, ad_org,", "contents of the file target : str The target location", "config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\", gu, mod_org, \"asyn\", y, y, y)) config.add_module(IM(\"AUTOSAVE\",", "ad_org = 'https://github.com/areaDetector/' seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org = 'https://github.com/paulscherrerinstitute/' #", "that adds a new injector file to the install_config object", "rel_path and self.ad_path != None: return installSynApps.join_path(self.ad_path, temp) elif \"$(MOTOR)\"", "n, n)) config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org, \"ADPilatus\", n, n,", "if parameter is a valid InstallModule, then sets the config,", "which contents will be injected. \"\"\" def __init__(self, path_to_configure, name,", "will keep track of which position each module is in", "gu, ad_org, \"ADPointGrey\", n, n, n)) config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu,", "file containing representations of install configurations. 
The core Data representation", "the build order for the module Used for ensuring dependencies", "n, n, n)) config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\", gu, ad_org, \"ADVimba\", n,", "y, y, y)) config.add_module(IM(\"IPUNIDIG\", \"R2-11\", \"$(SUPPORT)/ipUnidig\", gu, mod_org, \"ipUnidig\", y,", "\"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org, \"ADMerlin\", n, n, n)) config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\",", "into a simpler data structure (since its used as a", "commonPlugins, commonPlugin_settings, etc. TODO: This class can probably be abstracted", "P=$(P), S=AcquireSequence\\n' if with_pva: autosave_str += 'file \"NDPva_settings.req\", P=$(P), R=Pva1:\\n'", "used to represent data that needs to be appended to", "otherwise \"\"\" valid = True message = None target =", ": InstallConfiguration Self object Returns ------- bool True if install", "elif \"$(AREA_DETECTOR)\" in rel_path and self.ad_path != None: return installSynApps.join_path(self.ad_path,", "build \"\"\" out = [] for module in self.modules: if", "------- str The absolute installation path for the module. 
(Macros", "self.module_map = {} self.injector_files = [] self.build_flags = [] #", "Parameters ---------- module_A : str Name of first module module_B", "\"$(SUPPORT)/asyn\", gu, mod_org, \"asyn\", y, y, y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\",", "the file target : str The target location file into", "\"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org, \"ADProsilica\", n, n, n)) config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\",", "config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\", gu, syn_org, \"configure\", y, y, n)) config.add_module(IM(\"UTILS\",", "macro_list def get_module_list(self): \"\"\"Function that gets the list of modules", "\"utils\", y, y, n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\", wu, seq_rel, \"seq-2.2.8.tar.gz\",", "= 'https://github.com/epics-modules/' ad_org = 'https://github.com/areaDetector/' seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org =", "self.modules[self.module_map[name]] else: return None def get_module_build_index(self, name): \"\"\"Function that returns", "# Dict that maps module name to index in module", "module name as a key in a dictionary to return", "temp_B = self.get_module_by_name(module_A) self.modules[index_A] = temp_A self.modules[index_B] = temp_B self.module_map[module_A]", "== \"SUPPORT\": self.support_path = module.abs_path elif module.name == \"AREA_DETECTOR\": self.ad_path", "track of which position each module is in in the", "rel_path and self.install_location != None: return installSynApps.join_path(self.install_location, temp) elif \"$(EPICS_BASE)\"", "into which contents will be injected. \"\"\" def __init__(self, path_to_configure,", "searching. 
self.module_map = {} self.injector_files = [] self.build_flags = []", "!= None: return installSynApps.join_path(self.install_location, temp) elif \"$(EPICS_BASE)\" in rel_path and", "wu, seq_rel, \"seq-2.2.8.tar.gz\", y, y, y)) config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\", gu,", "name in self.module_map.keys(): return self.module_map[name] else: return -1 def get_core_version(self):", "self.ad_path = module.abs_path elif module.name == \"MOTOR\": self.motor_path = module.abs_path", "commonPlugin_settings, etc. TODO: This class can probably be abstracted into", "print_installation_info(self, fp = None): \"\"\"Function that prints installation info Prints", "int Index of module in build order if found, otherwise", "class \"\"\" self.path_to_configure = path_to_configure self.name = name self.contents =", "second module \"\"\" index_A = self.get_module_build_index(module_A) index_B = self.get_module_build_index(module_B) if", "fp = None : file pointer Optional pointer to an", "gu, mod_org, \"calc\", y, y, y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\", gu,", "contents, and target Injector file classes are used to represent", "install location of EPICS base support_path : str abs path", "list of InjectorFile list of injector files loaded by install", "Parameters ---------- name : str Module name Returns ------- int", "PVA server if this is enabled\\n' \\ 'startPVAServer\\n' \\ config.add_injector_file('PLUGIN_CONFIG',", "file and stores its name, contents, and target Injector file", "n, n)) common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str = 'file \"sseqRecord_settings.req\",", "'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org = 'https://github.com/paulscherrerinstitute/' # Add core modules that will", "configure folder of installSynApps modules : List of InsallModule list", "get_module_names_list(self): \"\"\"Function that gets list of 
modules being built Returns", "module Used for ensuring dependencies are built before lower level", "Add core modules that will generally always be built config.add_module(IM(\"EPICS_BASE\",", "message = None target = self.install_location if not os.path.exists(target): target", "of modules Used to ensure dependencies are built before lower", "\"$(PORT)\", 0, $(PREFIX)Pva1:Image, 0, 0, 0)\\n' \\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n'", "built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\", gu, base_org, \"epics-base\", y, y, y))", ": str Module name Returns ------- int Index of module", "to commonPlugins, commonPlugin_settings, etc. TODO: This class can probably be", "'YES': out.append(module.name) return out class InjectorFile: \"\"\"Class that represents an", "are used for representing text that need to be injected", "y)) config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\", gu, mod_org, \"quadEM\", y, y, y))", "\"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu, ad_org, \"ADAndor\", n, n, n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\",", "'Install location and parent directory do not exist' elif not", "list of str list of macro-value pairs enforced at build", "as a key in a dictionary to return reference to", "if not os.path.exists(target): valid = False message = 'Install location", "config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\", gu, mod_org, \"ipac\", y, y, y)) config.add_module(IM(\"ASYN\",", "module object. 
Parameters ---------- name : str Module name Returns", "| os.X_OK): valid = False message = 'Permission Error: {}'.format(target)", "representing the install configuration \"\"\" out = \"--------------------------------\\n\" out =", "gu, psi_org, \"StreamDevice\", y, y, y)) # AreaDetector and commonly", "None : file pointer Optional pointer to an external log", "path_to_configure, name, contents, target): \"\"\"Constructor of InjectorFile class \"\"\" self.path_to_configure", "gu, syn_org, \"support\", y, y, n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\", gu,", "y, y, n)) config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu, ad_org, \"ADSupport\", y,", "'NDPvaConfigure(\"PVA1\", $(QSIZE), 0, \"$(PORT)\", 0, $(PREFIX)Pva1:Image, 0, 0, 0)\\n' \\", "temp_A self.modules[index_B] = temp_B self.module_map[module_A] = index_B self.module_map[module_B] = index_A", "in a dictionary to return reference to given module object.", "and self.extensions_path != None: return installSynApps.join_path(self.extensions_path, temp) elif \"$(\" in", "false otherwise str Error message if applicable, None otherwise \"\"\"", "'YES': out = out + module.get_printable_string() return out def get_module_names_list(self):", "in rel_path: macro_part = rel_path.split(')')[0] rel_to = macro_part.split('(')[1] rel_to_module =", "\"\"\" out = \"--------------------------------\\n\" out = out + \"Install Location", "contents, target) self.injector_files.append(new_injector) def add_macros(self, macro_list): \"\"\"Function that adds macro-value", "\"MOTOR\": self.motor_path = module.abs_path elif module.name == \"EXTENSIONS\": self.extensions_path =", "rel_to_module is not None: return installSynApps.join_path(rel_to_module.abs_path, temp) return rel_path def", "str A string representing the install configuration \"\"\" out =", "---------- name : str Module name Returns ------- obj -", "return self.module_map[name] else: return -1 def 
get_core_version(self): \"\"\"Funciton that returns", "y, y, y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\", \"$(SUPPORT)/sscan\", gu, mod_org, \"sscan\", y,", "1)[-1] if \"$(INSTALL)\" in rel_path and self.install_location != None: return", "self.module_map.keys(): return self.modules[self.module_map[name]] else: return None def get_module_build_index(self, name): \"\"\"Function", "\"\"\"Function that returns install module object given module name Uses", "represents an injector file and stores its name, contents, and", "sets the appropriate variables. Also, add the module to the", "saved at {}\\n\".format(self.path_to_configure) for module in self.modules: if module.clone ==", "before lower level packages. Parameters ---------- name : str Module", "of first module module_B : str Name of second module", "module list First checks if parameter is a valid InstallModule,", "None: print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string()) def get_printable_string(self): \"\"\"Function that gets a", "rel_path: macro_part = rel_path.split(')')[0] rel_to = macro_part.split('(')[1] rel_to_module = self.get_module_by_name(rel_to)", "module being added. \"\"\" if isinstance(module, IM): # Updates the", "module Returns ------- str The absolute installation path for the", "def __init__(self, path_to_configure, name, contents, target): \"\"\"Constructor of InjectorFile class", "y)) # AreaDetector and commonly used drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\",", "0, \"$(PORT)\", 0, $(PREFIX)Pva1:Image, 0, 0, 0)\\n' \\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:,", "reference to given module object. 
Parameters ---------- name : str", "def is_install_valid(self): \"\"\"Function that checks if given install location is", "in self.module_map.keys(): return self.module_map[name] else: return -1 def get_core_version(self): \"\"\"Funciton", "def get_printable_string(self): \"\"\"Function that gets a toString for an InstallConfigurations", "y, y, y)) config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\", gu, mod_org, \"quadEM\", y,", "that needs to be appended to target files at build", "\"R2-11\", \"$(SUPPORT)/ipUnidig\", gu, mod_org, \"ipUnidig\", y, y, y)) # Some", "n, n, n)) config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org, \"ADGenICam\", n,", "new macros to append \"\"\" self.build_flags = self.build_flags + macro_list", "from a configuration, and is then used throughout the build", "\"$(EPICS_BASE)\" in rel_path and self.base_path != None: return installSynApps.join_path(self.base_path, temp)", "the top level install_location, the path to the configuration files,", "module_B): \"\"\"Swaps build order of modules Used to ensure dependencies", "y, y, y)) config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\", gu, mod_org, \"calc\", y,", "and self.install_location != None: return installSynApps.join_path(self.install_location, temp) elif \"$(EPICS_BASE)\" in", "Location = {}\\n\".format(self.install_location) out = out + \"This Install Config", "ad_org, \"ADMerlin\", n, n, n)) config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\", gu, ad_org,", "\"R6-1\", \"$(SUPPORT)/utils\", gu, syn_org, \"utils\", y, y, n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\",", "the build process. 
InjectorFile objects are used for representing text", "sets the config, and abs path, then if it is", "self.get_module_build_index(module_A) index_B = self.get_module_build_index(module_B) if index_A >= 0 and index_B", "module names to build index injector_files : list of InjectorFile", "Error: {}'.format(target) return valid, message def add_module(self, module): \"\"\"Function that", "self.build_flags = [] # Paths to the three install location", "names to build index injector_files : list of InjectorFile list", "\"\"\"Function that gets list of modules being built Returns -------", "elif module.name == \"EXTENSIONS\": self.extensions_path = module.abs_path self.module_map[module.name] = len(self.modules)", "str Name of second module \"\"\" index_A = self.get_module_build_index(module_A) index_B", "name : str Module name Returns ------- obj - InstallModule", "Used to add to commonPlugins, commonPlugin_settings, etc. TODO: This class", "\"Install Location = {}\\n\".format(self.install_location) out = out + \"This Install", "n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\", gu, syn_org, \"configure\", y, y, n))", "\\ '# Must start PVA server if this is enabled\\n'", "str Error message if applicable, None otherwise \"\"\" valid =", "y, y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\", gu, mod_org, \"iocStats\", y, y,", "out = out + \"Install Location = {}\\n\".format(self.install_location) out =", "for installSynApps It stores the top level install_location, the path", "Paths to the three install location paths used for relative", "'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '# Must start PVA server if", "= self.install_location if not os.path.exists(target): target = os.path.dirname(self.install_location) if not", "stores the top level install_location, the path to the configuration", "\"$(\" in rel_path: macro_part = rel_path.split(')')[0] 
rel_to = macro_part.split('(')[1] rel_to_module", "location paths used for relative path correction self.base_path = None", "def convert_path_abs(self, rel_path): \"\"\"Function that converts a given modules relative", "def get_module_build_index(self, name): \"\"\"Function that returns the index in the", "y, y)) config.add_module(IM(\"IPUNIDIG\", \"R2-11\", \"$(SUPPORT)/ipUnidig\", gu, mod_org, \"ipUnidig\", y, y,", "injector_files : list of InjectorFile list of injector files loaded", "\"\"\"A file containing representations of install configurations. The core Data", "'Permission Error: {}'.format(target) return valid, message def add_module(self, module): \"\"\"Function", "return self.get_module_by_name('ADCORE').version def swap_module_positions(self, module_A, module_B): \"\"\"Swaps build order of", "easier searching. self.module_map = {} self.injector_files = [] self.build_flags =", "mod_org, \"calc\", y, y, y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\", gu, mod_org,", "gu, mod_org, \"autosave\", y, y, y)) config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\", gu,", "self.install_location != None: return installSynApps.join_path(self.install_location, temp) elif \"$(EPICS_BASE)\" in rel_path", "location path_to_configure : str path to configure folder of installSynApps", "information Parameters ---------- fp = None : file pointer Optional", "= 'WGET_URL' base_org = 'https://github.com/epics-base/' syn_org = 'https://github.com/EPICS-synApps/' mod_org =", "files, any OS specific configurations, and the actual list of", "\"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu, ad_org, \"ADSupport\", y, y, y)) config.add_module(IM(\"ADCORE\", \"R3-8\",", "path to the configure dir that houses this injector file", "the abs path module.abs_path = self.convert_path_abs(module.rel_path) # Key paths to", "modules in the configuration Returns ------- List self.modules - list", "are built before lower level packages Parameters ---------- 
module_A :", "rel_to = macro_part.split('(')[1] rel_to_module = self.get_module_by_name(rel_to) if rel_to_module is not", "gu, mod_org, \"busy\", y, y, y)) config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\", gu,", "Parameters ---------- name : str name of the file contents", "\"\"\" out = [] for module in self.modules: if module.build", "\"motor\", y, y, y)) config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\", gu, mod_org, \"quadEM\",", "= 'YES' n = 'NO' gu = 'GIT_URL' wu =", "will be installed. Attributes ---------- install_location : str path to", "name self.contents = contents self.target = target def generate_default_install_config(target_install_loc='/epics', update_versions=False,", "install_config object Parameters ---------- name : str name of the", "os.X_OK): valid = False message = 'Permission Error: {}'.format(target) return", "rel_path.split(')')[0] rel_to = macro_part.split('(')[1] rel_to_module = self.get_module_by_name(rel_to) if rel_to_module is", "\"$(SUPPORT)/busy\", gu, mod_org, \"busy\", y, y, y)) config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\",", "modules which will keep track of which position each module", "of InstallModule objects representing the modules that will be installed", "that adds a module to the InstallConfiguration module list First", "module): \"\"\"Function that adds a module to the InstallConfiguration module", "y, y)) config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\", gu, mod_org, \"asyn\", y, y,", "with_pva=True): config = InstallConfiguration(target_install_loc, None) y = 'YES' n =", "\"This Install Config is saved at {}\\n\".format(self.path_to_configure) for module in", "\"quadEM\", y, y, y)) config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\", gu, psi_org, \"StreamDevice\",", "\"master\", \"$(SUPPORT)/iocStats\", gu, mod_org, \"iocStats\", y, y, y)) config.add_module(IM(\"SSCAN\", \"R2-11-3\",", "to be injected into configuration files prior to builds. 
\"\"\"", "\"busy\", y, y, y)) config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\", gu, mod_org, \"calc\",", "None: return installSynApps.join_path(self.install_location, temp) elif \"$(EPICS_BASE)\" in rel_path and self.base_path", "self.get_module_by_name(module_B) temp_B = self.get_module_by_name(module_A) self.modules[index_A] = temp_A self.modules[index_B] = temp_B", "---------- module : InstallModule new installation module being added. \"\"\"", "dict of str -> int Dictionary storing relation of module", "self.target = target def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True): config = InstallConfiguration(target_install_loc,", "to ensure dependencies are built before lower level packages Parameters", "that gets list of modules being built Returns ------- list", "valid, message def add_module(self, module): \"\"\"Function that adds a module", "out = \"--------------------------------\\n\" out = out + \"Install Location =", "gu, mod_org, \"asyn\", y, y, y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\", gu,", "the list/build order Parameters ---------- module : InstallModule new installation", "------- int Index of module in build order if found,", "temp_A = self.get_module_by_name(module_B) temp_B = self.get_module_by_name(module_A) self.modules[index_A] = temp_A self.modules[index_B]", "n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\", wu, seq_rel, \"seq-2.2.8.tar.gz\", y, y, y))", "= module.abs_path elif module.name == \"MOTOR\": self.motor_path = module.abs_path elif", "config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\", gu, mod_org, \"xspress3\", y, y, y)) config.add_module(IM(\"MOTOR\",", "temp = rel_path.split('/', 1)[-1] if \"$(INSTALL)\" in rel_path and self.install_location", "of module in build order if found, otherwise -1 \"\"\"", "self.extensions_path = module.abs_path self.module_map[module.name] = len(self.modules) 
self.modules.append(module) def add_injector_file(self, name,", "self.modules = [] # Dict that maps module name to", "str The contents of the file target : str The", "y, y, y)) config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org, \"ADPerkinElmer\", n,", "\"\"\" import os import installSynApps from installSynApps.data_model.install_module import InstallModule as", "index_B >= 0: temp_A = self.get_module_by_name(module_B) temp_B = self.get_module_by_name(module_A) self.modules[index_A]", "path If the macro name can be found in the", "Modules loaded into install config self.modules = [] # Dict", "to return reference to given module object. Parameters ---------- name", "file target : str The target location file into which", "= self.build_flags + macro_list def get_module_list(self): \"\"\"Function that gets the", "install configuration \"\"\" return self.modules def get_module_by_name(self, name): \"\"\"Function that", "for ensuring dependencies are built before lower level packages. Parameters", "info Prints list of all modules including clone/build/package information Parameters", "its name, contents, and target Injector file classes are used", "add_macros(self, macro_list): \"\"\"Function that adds macro-value pairs to a list", "to track if module.name == \"EPICS_BASE\": self.base_path = module.abs_path elif", "to install location of EPICS area detector motor_path : str", "\"$(SUPPORT)/quadEM\", gu, mod_org, \"quadEM\", y, y, y)) config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\",", "None if not found. 
\"\"\" if name in self.module_map.keys(): return", "to an absolute path If the macro name can be", "y, y)) config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\", gu, syn_org, \"support\", y, y,", "n, n, n)) common_plugins_str = 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str = 'file", "rel_path and self.extensions_path != None: return installSynApps.join_path(self.extensions_path, temp) elif \"$(\"", "= None self.motor_path = None self.extensions_path = None def is_install_valid(self):", "target = os.path.dirname(self.install_location) if not os.path.exists(target): valid = False message", "out class InjectorFile: \"\"\"Class that represents an injector file and", "n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\", gu, ad_org, \"ADProsilica\", n, n, n))", "if not os.path.exists(target): target = os.path.dirname(self.install_location) if not os.path.exists(target): valid", "list of new macros to append \"\"\" self.build_flags = self.build_flags", "replaced) \"\"\" temp = rel_path.split('/', 1)[-1] if \"$(INSTALL)\" in rel_path", "rel_path and self.support_path != None: return installSynApps.join_path(self.support_path, temp) elif \"$(AREA_DETECTOR)\"", "of InsallModule list of InstallModule objects representing the modules that", "append \"\"\" self.build_flags = self.build_flags + macro_list def get_module_list(self): \"\"\"Function", "to index in module list for easier searching. self.module_map =", "base_path : str abs path to install location of EPICS", "y)) config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\", gu, mod_org, \"calc\", y, y, y))", "order for the module Used for ensuring dependencies are built", "name to index in module list for easier searching. 
self.module_map", "abs path module.abs_path = self.convert_path_abs(module.rel_path) # Key paths to track", "target : str The target location file into which contents", "\"\"\" if name in self.module_map.keys(): return self.modules[self.module_map[name]] else: return None", "that will generally always be built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\", gu,", "module.get_printable_string() return out def get_module_names_list(self): \"\"\"Function that gets list of", "target): \"\"\"Constructor of InjectorFile class \"\"\" self.path_to_configure = path_to_configure self.name", "files prior to builds. \"\"\" import os import installSynApps from", "autosave_str += 'file \"NDPva_settings.req\", P=$(P), R=Pva1:\\n' common_plugins_str += 'NDPvaConfigure(\"PVA1\", $(QSIZE),", "key modules to track, sets the appropriate variables. Also, add", "get_module_by_name(self, name): \"\"\"Function that returns install module object given module", "n, n, n)) config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org, \"ADPointGrey\", n,", "self.extensions_path != None: return installSynApps.join_path(self.extensions_path, temp) elif \"$(\" in rel_path:", "---------- name : str name of the file contents :", "to build index injector_files : list of InjectorFile list of", "elif not os.access(target, os.W_OK | os.X_OK): valid = False message", "+= 'file \"NDPva_settings.req\", P=$(P), R=Pva1:\\n' common_plugins_str += 'NDPvaConfigure(\"PVA1\", $(QSIZE), 0,", "gu, mod_org, \"ipUnidig\", y, y, y)) # Some modules that", "motor_path : str abs path to install location of EPICS", "\"$(AREA_DETECTOR)/ADEiger\", gu, ad_org, \"ADEiger\", n, n, n)) config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\",", "module object given module name Uses module name as a", "syn_org, \"configure\", y, y, n)) config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\", gu, syn_org,", "n, n)) 
config.add_module(IM(\"ADSIMDETECTOR\", \"master\", \"$(AREA_DETECTOR)/ADSimDetector\", gu, ad_org, \"ADSimDetector\", n, n,", "module.abs_path elif module.name == \"MOTOR\": self.motor_path = module.abs_path elif module.name", "config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org, \"ADPilatus\", n, n, n)) config.add_module(IM(\"ADMERLIN\",", "os.path.abspath(install_location) # Modules loaded into install config self.modules = []", "always be built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\", gu, base_org, \"epics-base\", y,", "macro-value pairs to a list of macros Parameters ---------- macro_list", "mod_org, \"quadEM\", y, y, y)) config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\", gu, psi_org,", "build process. InjectorFile objects are used for representing text that", "which will keep track of which position each module is", "given module object. Parameters ---------- name : str Module name", "self.support_path = None self.ad_path = None self.motor_path = None self.extensions_path", "the three key modules to track, sets the appropriate variables.", "InstallConfiguration(target_install_loc, None) y = 'YES' n = 'NO' gu =", "= 'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str = 'file \"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n' if", "= None target = self.install_location if not os.path.exists(target): target =", "def get_module_names_list(self): \"\"\"Function that gets list of modules being built", "n)) config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu, ad_org, \"ADSupport\", y, y, y))", "message = 'Install location and parent directory do not exist'", "into install config self.modules = [] # Dict that maps", "the map of modules which will keep track of which", "y)) config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\", gu, psi_org, \"StreamDevice\", y, y, y))", "------- str A string 
representing the install configuration \"\"\" out", "0 and index_B >= 0: temp_A = self.get_module_by_name(module_B) temp_B =", "mod_org, \"xspress3\", y, y, y)) config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\", gu, mod_org,", "is a valid InstallModule, then sets the config, and abs", "None otherwise \"\"\" valid = True message = None target", "if rel_to_module is not None: return installSynApps.join_path(rel_to_module.abs_path, temp) return rel_path", "os.path.dirname(self.install_location) if not os.path.exists(target): valid = False message = 'Install", "\"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org, \"ADAndor3\", n, n, n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\", \"$(AREA_DETECTOR)/ADProsilica\",", "to the install_config object Parameters ---------- name : str name", "path_to_configure self.install_location = os.path.abspath(install_location) # Modules loaded into install config", "checks if parameter is a valid InstallModule, then sets the", "location of EPICS base support_path : str abs path to", "of macros Parameters ---------- macro_list : list of [str, str]", "return installSynApps.join_path(self.extensions_path, temp) elif \"$(\" in rel_path: macro_part = rel_path.split(')')[0]", "macros Parameters ---------- macro_list : list of [str, str] list", "gu, mod_org, \"ipac\", y, y, y)) config.add_module(IM(\"ASYN\", \"R4-37\", \"$(SUPPORT)/asyn\", gu,", "get_module_build_index(self, name): \"\"\"Function that returns the index in the build", "fp = None): \"\"\"Function that prints installation info Prints list", "module is in in the list/build order Parameters ---------- module", "get_module_list(self): \"\"\"Function that gets the list of modules in the", "if it is one of the three key modules to", "\"$(INSTALL)/support\", gu, syn_org, \"support\", y, y, n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\",", "\"R3-8\", \"$(SUPPORT)/areaDetector\", gu, ad_org, \"areaDetector\", y, y, n)) 
config.add_module(IM(\"ADSUPPORT\", \"R1-9\",", "\"seq-2.2.8.tar.gz\", y, y, y)) config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\", gu, mod_org, \"ipac\",", "build index injector_files : list of InjectorFile list of injector", "return installSynApps.join_path(self.motor_path, temp) elif \"$(EXTENSIONS)\" in rel_path and self.extensions_path !=", "ad_org, \"ADMythen\", n, n, n)) config.add_module(IM(\"ADURL\", \"master\", \"$(AREA_DETECTOR)/ADURL\", gu, ad_org,", "str path to configure folder of installSynApps modules : List", "\"ADPointGrey\", n, n, n)) config.add_module(IM(\"ADANDOR\", \"R2-8\", \"$(AREA_DETECTOR)/ADAndor\", gu, ad_org, \"ADAndor\",", "target) self.injector_files.append(new_injector) def add_macros(self, macro_list): \"\"\"Function that adds macro-value pairs", "not os.access(target, os.W_OK | os.X_OK): valid = False message =", "at build time. Used to add to commonPlugins, commonPlugin_settings, etc.", "---------- macro_list : list of [str, str] list of new", "Parameters ---------- fp = None : file pointer Optional pointer", "an external log file \"\"\" if fp == None: print(self.get_printable_string().strip())", "y, n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\", wu, seq_rel, \"seq-2.2.8.tar.gz\", y, y,", "abs path to install location of EPICS area detector motor_path", "representing text that need to be injected into configuration files", "and abs path, then if it is one of the", "location file into which contents will be injected. 
\"\"\" new_injector", "name Returns ------- obj - InstallModule Return matching module, or", "\"areaDetector\", y, y, n)) config.add_module(IM(\"ADSUPPORT\", \"R1-9\", \"$(AREA_DETECTOR)/ADSupport\", gu, ad_org, \"ADSupport\",", "= 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org = 'https://github.com/paulscherrerinstitute/' # Add core modules that", "paths used for relative path correction self.base_path = None self.support_path", "$(QSIZE), 0, \"$(PORT)\", 0, $(PREFIX)Pva1:Image, 0, 0, 0)\\n' \\ 'dbLoadRecords(\"NDPva.template\",", "A string representing the install configuration \"\"\" out = \"--------------------------------\\n\"", "list of modules in the configuration Returns ------- List self.modules", "obj - InstallModule Return matching module, or None if not", "list of all modules including clone/build/package information Parameters ---------- fp", "add to commonPlugins, commonPlugin_settings, etc. TODO: This class can probably", "= self.convert_path_abs(module.rel_path) # Key paths to track if module.name ==", "not found. \"\"\" if name in self.module_map.keys(): return self.modules[self.module_map[name]] else:", "InstallConfiguration module list First checks if parameter is a valid", "self : InstallConfiguration Self object Returns ------- bool True if", "a configuration, and is then used throughout the build process.", "def add_macros(self, macro_list): \"\"\"Function that adds macro-value pairs to a", "mod_org, \"asyn\", y, y, y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\", gu, mod_org,", "Dict that maps module name to index in module list", "ensuring dependencies are built before lower level packages. Parameters ----------", "path to the configuration files, any OS specific configurations, and", "core modules that will generally always be built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\",", "index in module list for easier searching. 
self.module_map = {}", "y)) config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\", gu, mod_org, \"autosave\", y, y, y))", "log file \"\"\" if fp == None: print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string())", "to install location of EPICS motor module_map : dict of", "stores its name, contents, and target Injector file classes are", "files at build time. Used to add to commonPlugins, commonPlugin_settings,", "y, y, n)) config.add_module(IM(\"SNCSEQ\", \"2.2.8\", \"$(SUPPORT)/seq\", wu, seq_rel, \"seq-2.2.8.tar.gz\", y,", "return out class InjectorFile: \"\"\"Class that represents an injector file", "given install location is valid Parameters ---------- self : InstallConfiguration", "self.injector_files = [] self.build_flags = [] # Paths to the", "\"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org, \"ADAndor3\", n, n, n)) config.add_module(IM(\"ADPROSILICA\", \"R2-5\",", "0, $(PREFIX)Pva1:Image, 0, 0, 0)\\n' \\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\", "installSynApps.join_path(rel_to_module.abs_path, temp) return rel_path def print_installation_info(self, fp = None): \"\"\"Function", "location is valid, false otherwise str Error message if applicable,", "be installed. Attributes ---------- install_location : str path to top", "str The relative installation path for the given module Returns", "as IM class InstallConfiguration: \"\"\" Class that represents an Install", "\\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '# Must start PVA server", "injector file and stores its name, contents, and target Injector", "module in build order if found, otherwise -1 \"\"\" if", "that returns the index in the build order for the", "for module in self.modules: if module.clone == 'YES': out =", "to given module object. 
Parameters ---------- name : str Module", "module.name == \"MOTOR\": self.motor_path = module.abs_path elif module.name == \"EXTENSIONS\":", "configuration files, any OS specific configurations, and the actual list", "if install location is valid, false otherwise str Error message", "abs path to install location of EPICS base support_path :", "before lower level packages Parameters ---------- module_A : str Name", "ad_org, \"ADPerkinElmer\", n, n, n)) config.add_module(IM(\"ADGENICAM\", \"master\", \"$(AREA_DETECTOR)/ADGenICam\", gu, ad_org,", "def get_module_list(self): \"\"\"Function that gets the list of modules in", "\"xspress3\", y, y, y)) config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\", gu, mod_org, \"motor\",", "name in self.module_map.keys(): return self.modules[self.module_map[name]] else: return None def get_module_build_index(self,", "# Some modules that are commonly needed config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\",", "the given module Returns ------- str The absolute installation path", "= None self.support_path = None self.ad_path = None self.motor_path =", "if isinstance(module, IM): # Updates the abs path module.abs_path =", "used throughout the build process. 
InjectorFile objects are used for", "y)) config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\", gu, mod_org, \"ipac\", y, y, y))", "path to install location of EPICS motor module_map : dict", "= out + \"This Install Config is saved at {}\\n\".format(self.path_to_configure)", "import os import installSynApps from installSynApps.data_model.install_module import InstallModule as IM", "\"configure\", y, y, n)) config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\", gu, syn_org, \"utils\",", "\"support\", y, y, n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\", gu, syn_org, \"configure\",", "n)) config.add_module(IM(\"ADDEXELA\", \"R2-3\", \"$(AREA_DETECTOR)/ADDexela\", gu, ad_org, \"ADDexela\", n, n, n))", "= False message = 'Permission Error: {}'.format(target) return valid, message", "== 'YES': out.append(module.name) return out class InjectorFile: \"\"\"Class that represents", "Data representation for installSynApps. An InstallConfiguration object is parsed from", "= False message = 'Install location and parent directory do", "= True message = None target = self.install_location if not", "The relative installation path for the given module Returns -------", "of EPICS motor module_map : dict of str -> int", "True if install location is valid, false otherwise str Error", "relation of module names to build index injector_files : list", "file \"\"\" if fp == None: print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string()) def", "return self.modules[self.module_map[name]] else: return None def get_module_build_index(self, name): \"\"\"Function that", "wu = 'WGET_URL' base_org = 'https://github.com/epics-base/' syn_org = 'https://github.com/EPICS-synApps/' mod_org", "InstallConfiguration object is parsed from a configuration, and is then", "config.add_module(IM(\"MOTOR\", \"R7-1\", \"$(SUPPORT)/motor\", gu, mod_org, \"motor\", y, y, y)) config.add_module(IM(\"QUADEM\",", "is not None: return 
installSynApps.join_path(rel_to_module.abs_path, temp) return rel_path def print_installation_info(self,", "\"master\", \"$(SUPPORT)/xspress3\", gu, mod_org, \"xspress3\", y, y, y)) config.add_module(IM(\"MOTOR\", \"R7-1\",", ": InstallModule new installation module being added. \"\"\" if isinstance(module,", "---------- module_A : str Name of first module module_B :", "injected into configuration files prior to builds. \"\"\" import os", "y, y)) config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\", gu, mod_org, \"calc\", y, y,", ": str path to configure folder of installSynApps modules :", "\"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu, ad_org, \"ADCore\", y, y, y)) config.add_module(IM(\"ADPERKINELMER\", \"master\",", "in in the list/build order Parameters ---------- module : InstallModule", "install location is valid Parameters ---------- self : InstallConfiguration Self", "== \"MOTOR\": self.motor_path = module.abs_path elif module.name == \"EXTENSIONS\": self.extensions_path", "get_printable_string(self): \"\"\"Function that gets a toString for an InstallConfigurations Returns", "the three install location paths used for relative path correction", "y, y, y)) config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\", gu, psi_org, \"StreamDevice\", y,", "that prints installation info Prints list of all modules including", "module_A : str Name of first module module_B : str", "which contents will be injected. 
\"\"\" new_injector = InjectorFile(self.path_to_configure, name,", "an injector file and stores its name, contents, and target", "for the module Used for ensuring dependencies are built before", "target Injector file classes are used to represent data that", "self.get_module_by_name(module_A) self.modules[index_A] = temp_A self.modules[index_B] = temp_B self.module_map[module_A] = index_B", "valid = False message = 'Install location and parent directory", "to configure and output locations self.path_to_configure = path_to_configure self.install_location =", "found. \"\"\" if name in self.module_map.keys(): return self.modules[self.module_map[name]] else: return", "installation path for the module. (Macros are replaced) \"\"\" temp", "= 'https://github.com/epics-base/' syn_org = 'https://github.com/EPICS-synApps/' mod_org = 'https://github.com/epics-modules/' ad_org =", "generally always be built config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\", gu, base_org, \"epics-base\",", "= None def is_install_valid(self): \"\"\"Function that checks if given install", "its used as a struct anyway) Attributes ---------- path_to_configure :", "config.add_module(IM(\"IPUNIDIG\", \"R2-11\", \"$(SUPPORT)/ipUnidig\", gu, mod_org, \"ipUnidig\", y, y, y)) #", "str path to the configure dir that houses this injector", "new_injector = InjectorFile(self.path_to_configure, name, contents, target) self.injector_files.append(new_injector) def add_macros(self, macro_list):", "------- obj - InstallModule Return matching module, or None if", "\\ config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req') if update_versions: installSynApps.sync_all_module_tags(config)", "out.append(module.name) return out class InjectorFile: \"\"\"Class that represents an injector", "path to an absolute 
path If the macro name can", "# Updates the abs path module.abs_path = self.convert_path_abs(module.rel_path) # Key", "bool True if install location is valid, false otherwise str", "list/build order Parameters ---------- module : InstallModule new installation module", "P=$(P), R=Pva1:\\n' common_plugins_str += 'NDPvaConfigure(\"PVA1\", $(QSIZE), 0, \"$(PORT)\", 0, $(PREFIX)Pva1:Image,", "be installed base_path : str abs path to install location", "= None self.extensions_path = None def is_install_valid(self): \"\"\"Function that checks", "lower level packages Parameters ---------- module_A : str Name of", "from installSynApps.data_model.install_module import InstallModule as IM class InstallConfiguration: \"\"\" Class", "n, n)) config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\", gu, ad_org, \"ADMythen\", n, n,", "if fp == None: print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string()) def get_printable_string(self): \"\"\"Function", "mod_org, \"motor\", y, y, y)) config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\", gu, mod_org,", "by install configuration build_flags : list of list of str", "Parameters ---------- name : str Module name Returns ------- obj", "packages. Parameters ---------- name : str Module name Returns -------", "n, n)) config.add_module(IM(\"ADVIMBA\", \"master\", \"$(AREA_DETECTOR)/ADVimba\", gu, ad_org, \"ADVimba\", n, n,", "builds. 
\"\"\" import os import installSynApps from installSynApps.data_model.install_module import InstallModule", "order of modules Used to ensure dependencies are built before", "n)) config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\", gu, syn_org, \"utils\", y, y, n))", "gu, ad_org, \"ADGenICam\", n, n, n)) config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu,", "self.install_location = os.path.abspath(install_location) # Modules loaded into install config self.modules", "relative installation path for the given module Returns ------- str", "== \"AREA_DETECTOR\": self.ad_path = module.abs_path elif module.name == \"MOTOR\": self.motor_path", "be found in the list of accounted for modules, replace", "install configurations. The core Data representation for installSynApps. An InstallConfiguration", "------- bool True if install location is valid, false otherwise", "n, n)) config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu, ad_org, \"ADMerlin\", n, n,", "$(PREFIX)Pva1:Image, 0, 0, 0)\\n' \\ 'dbLoadRecords(\"NDPva.template\", \"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '#", "self.motor_path = None self.extensions_path = None def is_install_valid(self): \"\"\"Function that", "== \"EXTENSIONS\": self.extensions_path = module.abs_path self.module_map[module.name] = len(self.modules) self.modules.append(module) def", "gets a toString for an InstallConfigurations Returns ------- str A", "== \"EPICS_BASE\": self.base_path = module.abs_path elif module.name == \"SUPPORT\": self.support_path", "= {}\\n\".format(self.install_location) out = out + \"This Install Config is", "valid = False message = 'Permission Error: {}'.format(target) return valid,", "------- List self.modules - list of modules to install in", "self.motor_path = module.abs_path elif module.name == \"EXTENSIONS\": self.extensions_path = module.abs_path", "name Uses module name as a key in a dictionary", "found 
in the list of accounted for modules, replace it", "Index of module in build order if found, otherwise -1", "of InjectorFile class \"\"\" self.path_to_configure = path_to_configure self.name = name", "return installSynApps.join_path(rel_to_module.abs_path, temp) return rel_path def print_installation_info(self, fp = None):", "+ \"This Install Config is saved at {}\\n\".format(self.path_to_configure) for module", "str list of module names that are set to build", "adds a new injector file to the install_config object Parameters", "return installSynApps.join_path(self.install_location, temp) elif \"$(EPICS_BASE)\" in rel_path and self.base_path !=", "y)) config.add_module(IM(\"ADPERKINELMER\", \"master\", \"$(AREA_DETECTOR)/ADPerkinElmer\", gu, ad_org, \"ADPerkinElmer\", n, n, n))", "module_map : dict of str -> int Dictionary storing relation", "otherwise str Error message if applicable, None otherwise \"\"\" valid", "---------- install_location : str path to top level install location", "Parameters ---------- rel_path : str The relative installation path for", "that are set to build \"\"\" out = [] for", "then used throughout the build process. InjectorFile objects are used", "config.add_module(IM(\"STREAM\", \"2.8.10\", \"$(SUPPORT)/stream\", gu, psi_org, \"StreamDevice\", y, y, y)) #", "str abs path to install location of EPICS support modules", "list of accounted for modules, replace it with that module's", "that houses this injector file name : str name of", "of module names that are set to build \"\"\" out", "# Modules loaded into install config self.modules = [] #", "target = self.install_location if not os.path.exists(target): target = os.path.dirname(self.install_location) if", "The absolute installation path for the module. (Macros are replaced)", "set to build \"\"\" out = [] for module in", "including clone/build/package information Parameters ---------- fp = None : file", "variables. 
Also, add the module to the map of modules", "str The absolute installation path for the module. (Macros are", "loaded by install configuration build_flags : list of list of", "= rel_path.split('/', 1)[-1] if \"$(INSTALL)\" in rel_path and self.install_location !=", "= 'NO' gu = 'GIT_URL' wu = 'WGET_URL' base_org =", "n, n)) config.add_module(IM(\"ADANDOR3\", \"master\", \"$(AREA_DETECTOR)/ADAndor3\", gu, ad_org, \"ADAndor3\", n, n,", "string representing the install configuration \"\"\" out = \"--------------------------------\\n\" out", "map of modules which will keep track of which position", "InsallModule list of InstallModule objects representing the modules that will", "to install in this install configuration \"\"\" return self.modules def", "config.add_module(IM(\"UTILS\", \"R6-1\", \"$(SUPPORT)/utils\", gu, syn_org, \"utils\", y, y, n)) config.add_module(IM(\"SNCSEQ\",", "install location is valid, false otherwise str Error message if", "correction self.base_path = None self.support_path = None self.ad_path = None", "config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd') config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req') if update_versions: installSynApps.sync_all_module_tags(config) return", "self.modules.append(module) def add_injector_file(self, name, contents, target): \"\"\"Function that adds a", "\"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu, ad_org, \"ADPilatus\", n, n, n)) config.add_module(IM(\"ADMERLIN\", \"master\",", "IM): # Updates the abs path module.abs_path = self.convert_path_abs(module.rel_path) #", "parameter is a valid InstallModule, then sets the config, and", "= [] self.build_flags = [] # Paths to the three", "in rel_path and self.support_path != None: return installSynApps.join_path(self.support_path, temp) elif", "mod_org, \"ipUnidig\", y, y, y)) # Some modules that are", "\"\"\"Function that 
adds macro-value pairs to a list of macros", "file into which contents will be injected. \"\"\" def __init__(self,", "module.clone == 'YES': out = out + module.get_printable_string() return out", "in self.module_map.keys(): return self.modules[self.module_map[name]] else: return None def get_module_build_index(self, name):", "ad_org, \"ADSupport\", y, y, y)) config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu, ad_org,", "self.module_map.keys(): return self.module_map[name] else: return -1 def get_core_version(self): \"\"\"Funciton that", "= name self.contents = contents self.target = target def generate_default_install_config(target_install_loc='/epics',", "to track, sets the appropriate variables. Also, add the module", "to the configuration files, any OS specific configurations, and the", "macro name can be found in the list of accounted", "to be appended to target files at build time. Used", "gu, ad_org, \"ADPilatus\", n, n, n)) config.add_module(IM(\"ADMERLIN\", \"master\", \"$(AREA_DETECTOR)/ADMerlin\", gu,", "that gets the list of modules in the configuration Returns", "that need to be injected into configuration files prior to", "gu, base_org, \"epics-base\", y, y, y)) config.add_module(IM(\"SUPPORT\", \"R6-1\", \"$(INSTALL)/support\", gu,", "\"ADSupport\", y, y, y)) config.add_module(IM(\"ADCORE\", \"R3-8\", \"$(AREA_DETECTOR)/ADCore\", gu, ad_org, \"ADCore\",", "ad_org, \"ADVimba\", n, n, n)) config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org,", "of which position each module is in in the list/build", "y, y)) config.add_module(IM(\"QUADEM\", \"R9-3\", \"$(SUPPORT)/quadEM\", gu, mod_org, \"quadEM\", y, y,", "return None def get_module_build_index(self, name): \"\"\"Function that returns the index", "used drivers config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\", gu, ad_org, \"areaDetector\", y, y,", "contents self.target = target def 
generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True): config =", "'https://github.com/paulscherrerinstitute/' # Add core modules that will generally always be", ": List of InsallModule list of InstallModule objects representing the", "macro_list : list of [str, str] list of new macros", "= [] # Paths to the three install location paths", "config.add_module(IM(\"AUTOSAVE\", \"R5-10\", \"$(SUPPORT)/autosave\", gu, mod_org, \"autosave\", y, y, y)) config.add_module(IM(\"BUSY\",", "converts a given modules relative path to an absolute path", "module.name == \"EPICS_BASE\": self.base_path = module.abs_path elif module.name == \"SUPPORT\":", "def get_module_by_name(self, name): \"\"\"Function that returns install module object given", "\"\"\" valid = True message = None target = self.install_location", "rel_path : str The relative installation path for the given", "\"$(AREA_DETECTOR)/ADDexela\", gu, ad_org, \"ADDexela\", n, n, n)) config.add_module(IM(\"ADMYTHEN\", \"master\", \"$(AREA_DETECTOR)/ADMythen\",", "elif \"$(\" in rel_path: macro_part = rel_path.split(')')[0] rel_to = macro_part.split('(')[1]", "n, n, n)) config.add_module(IM(\"ADEIGER\", \"R2-6\", \"$(AREA_DETECTOR)/ADEiger\", gu, ad_org, \"ADEiger\", n,", "temp) elif \"$(MOTOR)\" in rel_path and self.motor_path != None: return", "= self.get_module_by_name(module_A) self.modules[index_A] = temp_A self.modules[index_B] = temp_B self.module_map[module_A] =", "self.module_map[module_B] = index_A def convert_path_abs(self, rel_path): \"\"\"Function that converts a", "list of macro-value pairs enforced at build time \"\"\" def", "appended to target files at build time. Used to add", "three key modules to track, sets the appropriate variables. 
Also,", "directory do not exist' elif not os.access(target, os.W_OK | os.X_OK):", "configuration Returns ------- List self.modules - list of modules to", "replace it with that module's absolute path Parameters ---------- rel_path", "= None): \"\"\"Function that prints installation info Prints list of", "and stores its name, contents, and target Injector file classes", "modules to install in this install configuration \"\"\" return self.modules", "y, y)) config.add_module(IM(\"BUSY\", \"R1-7-2\", \"$(SUPPORT)/busy\", gu, mod_org, \"busy\", y, y,", "and target Injector file classes are used to represent data", "be appended to target files at build time. Used to", "y, y, n)) config.add_module(IM(\"CONFIGURE\", \"R6-1\", \"$(SUPPORT)/configure\", gu, syn_org, \"configure\", y,", "name of the file contents : str The contents of", "y, y, y)) config.add_module(IM(\"DEVIOCSTATS\", \"master\", \"$(SUPPORT)/iocStats\", gu, mod_org, \"iocStats\", y,", "+ macro_list def get_module_list(self): \"\"\"Function that gets the list of", "path_to_configure : str path to configure folder of installSynApps modules", "== 'YES': out = out + module.get_printable_string() return out def", "and is then used throughout the build process. 
InjectorFile objects", "n)) config.add_module(IM(\"ADPOINTGREY\", \"master\", \"$(AREA_DETECTOR)/ADPointGrey\", gu, ad_org, \"ADPointGrey\", n, n, n))", "\"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n' if with_pva: autosave_str += 'file \"NDPva_settings.req\", P=$(P),", "needed config.add_module(IM(\"XSPRESS3\", \"master\", \"$(SUPPORT)/xspress3\", gu, mod_org, \"xspress3\", y, y, y))", "---------- path_to_configure : str path to the configure dir that", "\"\"\" if fp == None: print(self.get_printable_string().strip()) else: fp.write(self.get_printable_string()) def get_printable_string(self):", "in the configuration Returns ------- List self.modules - list of", "index_B self.module_map[module_B] = index_A def convert_path_abs(self, rel_path): \"\"\"Function that converts", "index_A def convert_path_abs(self, rel_path): \"\"\"Function that converts a given modules", "n, n)) config.add_module(IM(\"ADARAVIS\", \"master\", \"$(AREA_DETECTOR)/ADAravis\", gu, ad_org, \"ADAravis\", n, n,", "= \"--------------------------------\\n\" out = out + \"Install Location = {}\\n\".format(self.install_location)", "This class can probably be abstracted into a simpler data", "for the given module Returns ------- str The absolute installation", "seq_rel, \"seq-2.2.8.tar.gz\", y, y, y)) config.add_module(IM(\"IPAC\", \"2.15\", \"$(SUPPORT)/ipac\", gu, mod_org,", "new installation module being added. 
\"\"\" if isinstance(module, IM): #", "else: fp.write(self.get_printable_string()) def get_printable_string(self): \"\"\"Function that gets a toString for", ">= 0: temp_A = self.get_module_by_name(module_B) temp_B = self.get_module_by_name(module_A) self.modules[index_A] =", "gu, ad_org, \"ADSimDetector\", n, n, n)) config.add_module(IM(\"ADPILATUS\", \"R2-8\", \"$(AREA_DETECTOR)/ADPilatus\", gu,", "to build \"\"\" out = [] for module in self.modules:", "return installSynApps.join_path(self.base_path, temp) elif \"$(SUPPORT)\" in rel_path and self.support_path !=", "returns the index in the build order for the module", "absolute installation path for the module. (Macros are replaced) \"\"\"", "seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org = 'https://github.com/paulscherrerinstitute/' # Add core modules", "if index_A >= 0 and index_B >= 0: temp_A =", "parsed from a configuration, and is then used throughout the", "(since its used as a struct anyway) Attributes ---------- path_to_configure", "'dbLoadRecords(\"$(DEVIOCSTATS)/db/iocAdminSoft.db\", \"IOC=$(PREFIX)\")\\n' autosave_str = 'file \"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n' if with_pva:", "into which contents will be injected. 
\"\"\" new_injector = InjectorFile(self.path_to_configure,", "\"2.15\", \"$(SUPPORT)/ipac\", gu, mod_org, \"ipac\", y, y, y)) config.add_module(IM(\"ASYN\", \"R4-37\",", "'https://github.com/areaDetector/' seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/' psi_org = 'https://github.com/paulscherrerinstitute/' # Add core", "\"IOC=$(PREFIX)\")\\n' autosave_str = 'file \"sseqRecord_settings.req\", P=$(P), S=AcquireSequence\\n' if with_pva: autosave_str", "config.add_module(IM(\"EPICS_BASE\", \"R7.0.3\", \"$(INSTALL)/base\", gu, base_org, \"epics-base\", y, y, y)) config.add_module(IM(\"SUPPORT\",", "# Paths to configure and output locations self.path_to_configure = path_to_configure", "\"master\", \"$(AREA_DETECTOR)/ADAravis\", gu, ad_org, \"ADAravis\", n, n, n)) config.add_module(IM(\"ADEIGER\", \"R2-6\",", "config = InstallConfiguration(target_install_loc, None) y = 'YES' n = 'NO'", "os.path.exists(target): target = os.path.dirname(self.install_location) if not os.path.exists(target): valid = False", "add_module(self, module): \"\"\"Function that adds a module to the InstallConfiguration", "the file contents : str The contents of the file", "object. Parameters ---------- name : str Module name Returns -------", "the module. 
(Macros are replaced) \"\"\" temp = rel_path.split('/', 1)[-1]", "Prints list of all modules including clone/build/package information Parameters ----------", "# Add core modules that will generally always be built", "= [] # Dict that maps module name to index", "InstallModule, then sets the config, and abs path, then if", "install in this install configuration \"\"\" return self.modules def get_module_by_name(self,", "= InstallConfiguration(target_install_loc, None) y = 'YES' n = 'NO' gu", "represent data that needs to be appended to target files", "the InstallConfiguration object \"\"\" # Paths to configure and output", "\"$(EXTENSIONS)\" in rel_path and self.extensions_path != None: return installSynApps.join_path(self.extensions_path, temp)", "!= None: return installSynApps.join_path(self.base_path, temp) elif \"$(SUPPORT)\" in rel_path and", "y)) config.add_module(IM(\"IPUNIDIG\", \"R2-11\", \"$(SUPPORT)/ipUnidig\", gu, mod_org, \"ipUnidig\", y, y, y))", "checks if given install location is valid Parameters ---------- self", "InstallModule Return matching module, or None if not found. 
\"\"\"", "mod_org, \"busy\", y, y, y)) config.add_module(IM(\"CALC\", \"R3-7-3\", \"$(SUPPORT)/calc\", gu, mod_org,", "pointer to an external log file \"\"\" if fp ==", "self.modules def get_module_by_name(self, name): \"\"\"Function that returns install module object", "a simpler data structure (since its used as a struct", "out + \"This Install Config is saved at {}\\n\".format(self.path_to_configure) for", "None: return installSynApps.join_path(self.motor_path, temp) elif \"$(EXTENSIONS)\" in rel_path and self.extensions_path", "= self.get_module_by_name(module_B) temp_B = self.get_module_by_name(module_A) self.modules[index_A] = temp_A self.modules[index_B] =", "config self.modules = [] # Dict that maps module name", "message if applicable, None otherwise \"\"\" valid = True message", "of modules being built Returns ------- list of str list", "IM class InstallConfiguration: \"\"\" Class that represents an Install Configuration", "the InstallConfiguration module list First checks if parameter is a", "config.add_module(IM(\"AREA_DETECTOR\", \"R3-8\", \"$(SUPPORT)/areaDetector\", gu, ad_org, \"areaDetector\", y, y, n)) config.add_module(IM(\"ADSUPPORT\",", "\"P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)\")\\n' \\ '# Must start PVA server if this", "!= None: return installSynApps.join_path(self.support_path, temp) elif \"$(AREA_DETECTOR)\" in rel_path and", "\"\"\" def __init__(self, install_location, path_to_configure): \"\"\"Constructor for the InstallConfiguration object", "being built Returns ------- list of str list of module", "installSynApps.join_path(self.ad_path, temp) elif \"$(MOTOR)\" in rel_path and self.motor_path != None:", "that converts a given modules relative path to an absolute", "being added. 
\"\"\" if isinstance(module, IM): # Updates the abs", "if module.clone == 'YES': out = out + module.get_printable_string() return", "valid = True message = None target = self.install_location if", "build time \"\"\" def __init__(self, install_location, path_to_configure): \"\"\"Constructor for the", "add_injector_file(self, name, contents, target): \"\"\"Function that adds a new injector", "[] # Paths to the three install location paths used", "os import installSynApps from installSynApps.data_model.install_module import InstallModule as IM class", "self.contents = contents self.target = target def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True):", "if found, otherwise -1 \"\"\" if name in self.module_map.keys(): return", "a dictionary to return reference to given module object. Parameters", "self.base_path != None: return installSynApps.join_path(self.base_path, temp) elif \"$(SUPPORT)\" in rel_path", "\"\"\"Function that prints installation info Prints list of all modules" ]
[ "form.instance email = form.cleaned_data['email'] msg = settings.INVITE_MESSAGE % { 'user':", "sid=sid) if not invite: return {'redirect': 'core:ufo'} form = InviteRegisterForm(request.POST", "request.user.api_key.generate_key() request.user.api_key.save() key = request.user.api_key.key return {'success': True, 'key': key}", "django.db import transaction from django.utils.translation import ugettext_lazy as _ @render_to('accounts/login.html')", "None) if request.method == 'POST': if form.is_valid(): user = form.cleaned_data['user']", "form.cleaned_data['user'] auth.login(request, user) return {'redirect': 'core:index'} return { 'form': form", "import render_to, ajax_response, get_object_or_None from apps.core.decorators import lock, login_required_json from", ":) send_mail( subject=unicode(_('You have been invited to b3ban service')), message=unicode(msg),", "invite: return {'redirect': 'core:ufo'} form = InviteRegisterForm(request.POST or None) if", "import check_invite from apps.accounts.forms import ( LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm", "if request.method == \"POST\": if form.is_valid(): user = form.save(commit=False) user.set_password(form.cleaned_data['password'])", "form } #@check for possibility to register @transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html')", "'accounts:invite-success'} return { 'form': form } #@check for possibility to", "django.core.urlresolvers import reverse from django.contrib import auth from django.contrib.auth.decorators import", "request.user.api_key.key return {'success': True, 'key': key} return {'success': False} @lock(\"REGISTER_ALLOWED\")", "django.utils.translation import ugettext_lazy as _ @render_to('accounts/login.html') def login(request): form =", "\"POST\": if form.is_valid(): user = form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save() return {'redirect':", "lock, login_required_json from 
apps.accounts.models import Invite from apps.accounts.decorators import check_invite", "request=request) if request.method == 'POST': if form.is_valid(): form.save(commit=False) invite =", "apps.core.decorators import lock, login_required_json from apps.accounts.models import Invite from apps.accounts.decorators", "{ 'user': request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register', args=(invite.sid, )) }", "None) if request.method == 'POST': if form.is_valid(): invite.is_verified = True", "request.user.api_key.key = request.user.api_key.generate_key() request.user.api_key.save() key = request.user.api_key.key return {'success': True,", "_ @render_to('accounts/login.html') def login(request): form = LoginForm(request.POST or None) if", "( LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm ) from django.core.mail import send_mail", "return {'success': False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def register(request): form = AccountRegisterForm(request.POST", "@render_to('accounts/invite.html') def invite(request): form = SendInviteForm(request.POST or None, request=request) if", "if request.method == 'POST': if form.is_valid(): form.save(commit=False) invite = form.instance", "form.is_valid(): invite.is_verified = True invite.save() user = form.save(commit=False) user.email =", "auth from django.contrib.auth.decorators import login_required from django.conf import settings from", "user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'accounts:invite-register-success'} return {'form': form, 'sid': sid}", "import transaction from django.utils.translation import ugettext_lazy as _ @render_to('accounts/login.html') def", "} #@check for possibility to register @transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html') def", "InviteRegisterForm(request.POST or None) if request.method == 'POST': if form.is_valid(): 
invite.is_verified", "return { 'form': form } @render_to('index.html') def logout(request): auth.logout(request) return", "= get_object_or_None(Invite, sid=sid) if not invite: return {'redirect': 'core:ufo'} form", "def invite(request): form = SendInviteForm(request.POST or None, request=request) if request.method", "user.email = invite.email user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'accounts:invite-register-success'} return {'form':", "or None) if request.method == 'POST': if form.is_valid(): invite.is_verified =", "'key': key} return {'success': False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def register(request): form", "@render_to('accounts/profile.html') def profile(request): return {} @login_required_json @ajax_response def generate_new_api_key(request): if", "LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm ) from django.core.mail import send_mail from", "form.cleaned_data['email'] msg = settings.INVITE_MESSAGE % { 'user': request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\"", "def generate_new_api_key(request): if request.method == 'POST': request.user.api_key.key = request.user.api_key.generate_key() request.user.api_key.save()", "key = request.user.api_key.key return {'success': True, 'key': key} return {'success':", "if request.method == 'POST': if form.is_valid(): user = form.cleaned_data['user'] auth.login(request,", "logout(request): auth.logout(request) return {} @render_to('accounts/profile.html') def profile(request): return {} @login_required_json", "LoginForm(request.POST or None) if request.method == 'POST': if form.is_valid(): user", "user) return {'redirect': 'core:index'} return { 'form': form } @render_to('index.html')", "= SendInviteForm(request.POST or None, request=request) if request.method == 'POST': if", "if not invite: return {'redirect': 'core:ufo'} form = InviteRegisterForm(request.POST or", "== 'POST': if form.is_valid(): user = 
form.cleaned_data['user'] auth.login(request, user) return", "ugettext_lazy as _ @render_to('accounts/login.html') def login(request): form = LoginForm(request.POST or", "ajax_response, get_object_or_None from apps.core.decorators import lock, login_required_json from apps.accounts.models import", "from apps.core.helpers import render_to, ajax_response, get_object_or_None from apps.core.decorators import lock,", "from django.core.urlresolvers import reverse from django.contrib import auth from django.contrib.auth.decorators", "form } @render_to('index.html') def logout(request): auth.logout(request) return {} @render_to('accounts/profile.html') def", "{} @render_to('accounts/profile.html') def profile(request): return {} @login_required_json @ajax_response def generate_new_api_key(request):", "apps.accounts.decorators import check_invite from apps.accounts.forms import ( LoginForm, AccountRegisterForm, SendInviteForm,", "check_invite from apps.accounts.forms import ( LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm )", "msg = settings.INVITE_MESSAGE % { 'user': request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\" %", "apps.accounts.forms import ( LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm ) from django.core.mail", "coding: utf-8 -*- from apps.core.helpers import render_to, ajax_response, get_object_or_None from", "def profile(request): return {} @login_required_json @ajax_response def generate_new_api_key(request): if request.method", "form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'core:index'} return { 'form': form", "invite.is_verified = True invite.save() user = form.save(commit=False) user.email = invite.email", "return {'success': True, 'key': key} return {'success': False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html')", "{} @login_required_json @ajax_response def generate_new_api_key(request): if request.method == 'POST': 
request.user.api_key.key", "form.save(commit=False) invite = form.instance email = form.cleaned_data['email'] msg = settings.INVITE_MESSAGE", "= form.instance email = form.cleaned_data['email'] msg = settings.INVITE_MESSAGE % {", "invite.save() return {'redirect': 'accounts:invite-success'} return { 'form': form } #@check", "login(request): form = LoginForm(request.POST or None) if request.method == 'POST':", "= form.cleaned_data['user'] auth.login(request, user) return {'redirect': 'core:index'} return { 'form':", "send_mail from django.core.urlresolvers import reverse from django.contrib import auth from", "login_required from django.conf import settings from django.db import transaction from", "request.method == \"POST\": if form.is_valid(): user = form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save()", "{'success': True, 'key': key} return {'success': False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def", "False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def register(request): form = AccountRegisterForm(request.POST or None)", "apps.core.helpers import render_to, ajax_response, get_object_or_None from apps.core.decorators import lock, login_required_json", "invite = form.instance email = form.cleaned_data['email'] msg = settings.INVITE_MESSAGE %", "render_to, ajax_response, get_object_or_None from apps.core.decorators import lock, login_required_json from apps.accounts.models", "'core:index'} return { 'form': form } @login_required @render_to('accounts/invite.html') def invite(request):", "send_mail( subject=unicode(_('You have been invited to b3ban service')), message=unicode(msg), from_email=settings.EMAIL_FROM,", "register @transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html') def invite_register(request, sid): invite = get_object_or_None(Invite,", "form.is_valid(): form.save(commit=False) invite = form.instance email = 
form.cleaned_data['email'] msg =", "invite_register(request, sid): invite = get_object_or_None(Invite, sid=sid) if not invite: return", "if request.method == 'POST': if form.is_valid(): invite.is_verified = True invite.save()", "True invite.save() user = form.save(commit=False) user.email = invite.email user.set_password(form.cleaned_data['password']) user.save()", "invite.save() user = form.save(commit=False) user.email = invite.email user.set_password(form.cleaned_data['password']) user.save() return", "import lock, login_required_json from apps.accounts.models import Invite from apps.accounts.decorators import", "possibility to register @transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html') def invite_register(request, sid): invite", ")) } #no mail send, no money :) send_mail( subject=unicode(_('You", "from django.core.mail import send_mail from django.core.urlresolvers import reverse from django.contrib", "message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email] ) invite.save() return {'redirect': 'accounts:invite-success'} return {", "return {'redirect': 'accounts:invite-success'} return { 'form': form } #@check for", "django.contrib import auth from django.contrib.auth.decorators import login_required from django.conf import", "{'redirect': 'core:index'} return { 'form': form } @render_to('index.html') def logout(request):", "or None) if request.method == 'POST': if form.is_valid(): user =", "{'success': False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def register(request): form = AccountRegisterForm(request.POST or", "return { 'form': form } #@check for possibility to register", "not invite: return {'redirect': 'core:ufo'} form = InviteRegisterForm(request.POST or None)", "@render_to('accounts/register.html') def register(request): form = AccountRegisterForm(request.POST or None) if request.method", "if form.is_valid(): form.save(commit=False) invite = form.instance 
email = form.cleaned_data['email'] msg", "your views here. # -*- coding: utf-8 -*- from apps.core.helpers", "== \"POST\": if form.is_valid(): user = form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save() return", "settings.INVITE_MESSAGE % { 'user': request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register', args=(invite.sid,", "#@check for possibility to register @transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html') def invite_register(request,", "return {'redirect': 'core:ufo'} form = InviteRegisterForm(request.POST or None) if request.method", "def logout(request): auth.logout(request) return {} @render_to('accounts/profile.html') def profile(request): return {}", "no money :) send_mail( subject=unicode(_('You have been invited to b3ban", "@render_to('accounts/invite_register.html') def invite_register(request, sid): invite = get_object_or_None(Invite, sid=sid) if not", "Invite from apps.accounts.decorators import check_invite from apps.accounts.forms import ( LoginForm,", "import reverse from django.contrib import auth from django.contrib.auth.decorators import login_required", "form = SendInviteForm(request.POST or None, request=request) if request.method == 'POST':", "} #no mail send, no money :) send_mail( subject=unicode(_('You have", "return {'redirect': 'core:index'} return { 'form': form } @login_required @render_to('accounts/invite.html')", "login_required_json from apps.accounts.models import Invite from apps.accounts.decorators import check_invite from", "auth.logout(request) return {} @render_to('accounts/profile.html') def profile(request): return {} @login_required_json @ajax_response", "'POST': if form.is_valid(): form.save(commit=False) invite = form.instance email = form.cleaned_data['email']", "profile(request): return {} @login_required_json @ajax_response def generate_new_api_key(request): if request.method ==", "form = 
LoginForm(request.POST or None) if request.method == 'POST': if", "from apps.accounts.decorators import check_invite from apps.accounts.forms import ( LoginForm, AccountRegisterForm,", "request.method == 'POST': if form.is_valid(): user = form.cleaned_data['user'] auth.login(request, user)", "= AccountRegisterForm(request.POST or None) if request.method == \"POST\": if form.is_valid():", "'user': request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register', args=(invite.sid, )) } #no", "invite = get_object_or_None(Invite, sid=sid) if not invite: return {'redirect': 'core:ufo'}", ") invite.save() return {'redirect': 'accounts:invite-success'} return { 'form': form }", "= settings.INVITE_MESSAGE % { 'user': request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register',", "from_email=settings.EMAIL_FROM, recipient_list=[email] ) invite.save() return {'redirect': 'accounts:invite-success'} return { 'form':", "-*- coding: utf-8 -*- from apps.core.helpers import render_to, ajax_response, get_object_or_None", "'link': \"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register', args=(invite.sid, )) } #no mail send,", "= InviteRegisterForm(request.POST or None) if request.method == 'POST': if form.is_valid():", "if form.is_valid(): invite.is_verified = True invite.save() user = form.save(commit=False) user.email", "{ 'form': form } @login_required @render_to('accounts/invite.html') def invite(request): form =", "#no mail send, no money :) send_mail( subject=unicode(_('You have been", "= form.cleaned_data['email'] msg = settings.INVITE_MESSAGE % { 'user': request.user.username, 'link':", "== 'POST': if form.is_valid(): form.save(commit=False) invite = form.instance email =", "from django.utils.translation import ugettext_lazy as _ @render_to('accounts/login.html') def login(request): form", "if form.is_valid(): user = form.cleaned_data['user'] auth.login(request, user) return 
{'redirect': 'core:index'}", "user = form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'core:index'} return {", "return { 'form': form } @login_required @render_to('accounts/invite.html') def invite(request): form", "form.is_valid(): user = form.cleaned_data['user'] auth.login(request, user) return {'redirect': 'core:index'} return", "from django.contrib.auth.decorators import login_required from django.conf import settings from django.db", "import send_mail from django.core.urlresolvers import reverse from django.contrib import auth", "form = InviteRegisterForm(request.POST or None) if request.method == 'POST': if", "# Create your views here. # -*- coding: utf-8 -*-", "import ugettext_lazy as _ @render_to('accounts/login.html') def login(request): form = LoginForm(request.POST", "import settings from django.db import transaction from django.utils.translation import ugettext_lazy", "return {} @login_required_json @ajax_response def generate_new_api_key(request): if request.method == 'POST':", "money :) send_mail( subject=unicode(_('You have been invited to b3ban service')),", "from django.db import transaction from django.utils.translation import ugettext_lazy as _", "InviteRegisterForm ) from django.core.mail import send_mail from django.core.urlresolvers import reverse", "reverse('accounts:invite-register', args=(invite.sid, )) } #no mail send, no money :)", "= invite.email user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'accounts:invite-register-success'} return {'form': form,", "register(request): form = AccountRegisterForm(request.POST or None) if request.method == \"POST\":", "'core:index'} return { 'form': form } @render_to('index.html') def logout(request): auth.logout(request)", "import Invite from apps.accounts.decorators import check_invite from apps.accounts.forms import (", "% reverse('accounts:invite-register', args=(invite.sid, )) } #no mail send, no money", 
"django.contrib.auth.decorators import login_required from django.conf import settings from django.db import", "to b3ban service')), message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email] ) invite.save() return {'redirect':", "= LoginForm(request.POST or None) if request.method == 'POST': if form.is_valid():", "key} return {'success': False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def register(request): form =", "form } @login_required @render_to('accounts/invite.html') def invite(request): form = SendInviteForm(request.POST or", "for possibility to register @transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html') def invite_register(request, sid):", "request.user.api_key.save() key = request.user.api_key.key return {'success': True, 'key': key} return", "AccountRegisterForm(request.POST or None) if request.method == \"POST\": if form.is_valid(): user", "or None, request=request) if request.method == 'POST': if form.is_valid(): form.save(commit=False)", "request.method == 'POST': if form.is_valid(): invite.is_verified = True invite.save() user", "get_object_or_None from apps.core.decorators import lock, login_required_json from apps.accounts.models import Invite", "<gh_stars>1-10 # Create your views here. 
# -*- coding: utf-8", "invite(request): form = SendInviteForm(request.POST or None, request=request) if request.method ==", "SendInviteForm(request.POST or None, request=request) if request.method == 'POST': if form.is_valid():", "utf-8 -*- from apps.core.helpers import render_to, ajax_response, get_object_or_None from apps.core.decorators", "reverse from django.contrib import auth from django.contrib.auth.decorators import login_required from", "{ 'form': form } @render_to('index.html') def logout(request): auth.logout(request) return {}", "@render_to('index.html') def logout(request): auth.logout(request) return {} @render_to('accounts/profile.html') def profile(request): return", "been invited to b3ban service')), message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email] ) invite.save()", "from apps.accounts.models import Invite from apps.accounts.decorators import check_invite from apps.accounts.forms", "@login_required_json @ajax_response def generate_new_api_key(request): if request.method == 'POST': request.user.api_key.key =", "= request.user.api_key.generate_key() request.user.api_key.save() key = request.user.api_key.key return {'success': True, 'key':", "True, 'key': key} return {'success': False} @lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def register(request):", "% { 'user': request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register', args=(invite.sid, ))", "form = AccountRegisterForm(request.POST or None) if request.method == \"POST\": if", "} @login_required @render_to('accounts/invite.html') def invite(request): form = SendInviteForm(request.POST or None,", "here. 
# -*- coding: utf-8 -*- from apps.core.helpers import render_to,", "mail send, no money :) send_mail( subject=unicode(_('You have been invited", "@ajax_response def generate_new_api_key(request): if request.method == 'POST': request.user.api_key.key = request.user.api_key.generate_key()", "# -*- coding: utf-8 -*- from apps.core.helpers import render_to, ajax_response,", "request.method == 'POST': if form.is_valid(): form.save(commit=False) invite = form.instance email", "{'redirect': 'core:index'} return { 'form': form } @login_required @render_to('accounts/invite.html') def", "\"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register', args=(invite.sid, )) } #no mail send, no", "service')), message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email] ) invite.save() return {'redirect': 'accounts:invite-success'} return", "import auth from django.contrib.auth.decorators import login_required from django.conf import settings", "return {'redirect': 'core:index'} return { 'form': form } @render_to('index.html') def", "'POST': if form.is_valid(): invite.is_verified = True invite.save() user = form.save(commit=False)", "or None) if request.method == \"POST\": if form.is_valid(): user =", "django.conf import settings from django.db import transaction from django.utils.translation import", "SendInviteForm, InviteRegisterForm ) from django.core.mail import send_mail from django.core.urlresolvers import", "auth.login(request, user) return {'redirect': 'core:index'} return { 'form': form }", "return {} @render_to('accounts/profile.html') def profile(request): return {} @login_required_json @ajax_response def", "transaction from django.utils.translation import ugettext_lazy as _ @render_to('accounts/login.html') def login(request):", "user = form.cleaned_data['user'] auth.login(request, user) return {'redirect': 'core:index'} return {", "from apps.accounts.forms import ( LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm ) from", 
"def login(request): form = LoginForm(request.POST or None) if request.method ==", "if request.method == 'POST': request.user.api_key.key = request.user.api_key.generate_key() request.user.api_key.save() key =", "AccountRegisterForm, SendInviteForm, InviteRegisterForm ) from django.core.mail import send_mail from django.core.urlresolvers", "= True invite.save() user = form.save(commit=False) user.email = invite.email user.set_password(form.cleaned_data['password'])", "user = form.save(commit=False) user.email = invite.email user.set_password(form.cleaned_data['password']) user.save() return {'redirect':", "def invite_register(request, sid): invite = get_object_or_None(Invite, sid=sid) if not invite:", "to register @transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html') def invite_register(request, sid): invite =", "generate_new_api_key(request): if request.method == 'POST': request.user.api_key.key = request.user.api_key.generate_key() request.user.api_key.save() key", "have been invited to b3ban service')), message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email] )", "sid): invite = get_object_or_None(Invite, sid=sid) if not invite: return {'redirect':", "request.method == 'POST': request.user.api_key.key = request.user.api_key.generate_key() request.user.api_key.save() key = request.user.api_key.key", "request.user.username, 'link': \"http://b3ban.blacklibrary.ru%s\" % reverse('accounts:invite-register', args=(invite.sid, )) } #no mail", "args=(invite.sid, )) } #no mail send, no money :) send_mail(", "subject=unicode(_('You have been invited to b3ban service')), message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email]", "{'redirect': 'core:ufo'} form = InviteRegisterForm(request.POST or None) if request.method ==", "'POST': request.user.api_key.key = request.user.api_key.generate_key() request.user.api_key.save() key = request.user.api_key.key return {'success':", "{'redirect': 
'accounts:invite-success'} return { 'form': form } #@check for possibility", "'form': form } @render_to('index.html') def logout(request): auth.logout(request) return {} @render_to('accounts/profile.html')", "@login_required @render_to('accounts/invite.html') def invite(request): form = SendInviteForm(request.POST or None, request=request)", "recipient_list=[email] ) invite.save() return {'redirect': 'accounts:invite-success'} return { 'form': form", "user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'core:index'} return { 'form': form }", "{ 'form': form } #@check for possibility to register @transaction.commit_on_success", "== 'POST': if form.is_valid(): invite.is_verified = True invite.save() user =", "'form': form } #@check for possibility to register @transaction.commit_on_success @check_invite(sid='sid')", "def register(request): form = AccountRegisterForm(request.POST or None) if request.method ==", "from apps.core.decorators import lock, login_required_json from apps.accounts.models import Invite from", "@render_to('accounts/login.html') def login(request): form = LoginForm(request.POST or None) if request.method", "} @render_to('index.html') def logout(request): auth.logout(request) return {} @render_to('accounts/profile.html') def profile(request):", "if form.is_valid(): user = form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'core:index'}", "== 'POST': request.user.api_key.key = request.user.api_key.generate_key() request.user.api_key.save() key = request.user.api_key.key return", "'form': form } @login_required @render_to('accounts/invite.html') def invite(request): form = SendInviteForm(request.POST", "form.save(commit=False) user.email = invite.email user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'accounts:invite-register-success'} return", "as _ @render_to('accounts/login.html') def login(request): form = LoginForm(request.POST or 
None)", "from django.contrib import auth from django.contrib.auth.decorators import login_required from django.conf", "from django.conf import settings from django.db import transaction from django.utils.translation", "invited to b3ban service')), message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email] ) invite.save() return", "import ( LoginForm, AccountRegisterForm, SendInviteForm, InviteRegisterForm ) from django.core.mail import", "@check_invite(sid='sid') @render_to('accounts/invite_register.html') def invite_register(request, sid): invite = get_object_or_None(Invite, sid=sid) if", "None, request=request) if request.method == 'POST': if form.is_valid(): form.save(commit=False) invite", "email = form.cleaned_data['email'] msg = settings.INVITE_MESSAGE % { 'user': request.user.username,", ") from django.core.mail import send_mail from django.core.urlresolvers import reverse from", "Create your views here. # -*- coding: utf-8 -*- from", "import login_required from django.conf import settings from django.db import transaction", "apps.accounts.models import Invite from apps.accounts.decorators import check_invite from apps.accounts.forms import", "'POST': if form.is_valid(): user = form.cleaned_data['user'] auth.login(request, user) return {'redirect':", "= request.user.api_key.key return {'success': True, 'key': key} return {'success': False}", "form.is_valid(): user = form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'core:index'} return", "None) if request.method == \"POST\": if form.is_valid(): user = form.save(commit=False)", "b3ban service')), message=unicode(msg), from_email=settings.EMAIL_FROM, recipient_list=[email] ) invite.save() return {'redirect': 'accounts:invite-success'}", "'core:ufo'} form = InviteRegisterForm(request.POST or None) if request.method == 'POST':", "= form.save(commit=False) user.email = invite.email user.set_password(form.cleaned_data['password']) user.save() 
return {'redirect': 'accounts:invite-register-success'}", "@lock(\"REGISTER_ALLOWED\") @render_to('accounts/register.html') def register(request): form = AccountRegisterForm(request.POST or None) if", "get_object_or_None(Invite, sid=sid) if not invite: return {'redirect': 'core:ufo'} form =", "django.core.mail import send_mail from django.core.urlresolvers import reverse from django.contrib import", "@transaction.commit_on_success @check_invite(sid='sid') @render_to('accounts/invite_register.html') def invite_register(request, sid): invite = get_object_or_None(Invite, sid=sid)", "= form.save(commit=False) user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'core:index'} return { 'form':", "user.save() return {'redirect': 'core:index'} return { 'form': form } @login_required", "-*- from apps.core.helpers import render_to, ajax_response, get_object_or_None from apps.core.decorators import", "invite.email user.set_password(form.cleaned_data['password']) user.save() return {'redirect': 'accounts:invite-register-success'} return {'form': form, 'sid':", "views here. # -*- coding: utf-8 -*- from apps.core.helpers import", "send, no money :) send_mail( subject=unicode(_('You have been invited to", "settings from django.db import transaction from django.utils.translation import ugettext_lazy as" ]
[ "the structured fields will be ignored in the requests and", "backup frequency. returned: on success type: str sample: ONE_HOUR offset_type:", "backup start time should be shifted from the default interval", "name. Does not have to be unique, and it's changeable.", "resource. Each key is predefined and scoped to a namespace.", "For more information about Oracle defined backup policies and user", "backups to. Example `us-ashburn-1`. See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about", "the volume backup policies available in the specified compartment. -", "backup to create. returned: on success type: str sample: FULL", "oracle.oci.oracle_display_name_option ] \"\"\" EXAMPLES = \"\"\" - name: Get a", "on success type: str sample: \"2013-10-20T19:20:30+01:00\" compartment_id: description: - The", "in requests and users should ignore its value from the", "'ONE_YEAR'.\" - They will be ignored in the requests for", "will be ignored in the requests for inapplicable periods. -", "number of seconds that the volume backup start time should", "on success type: int sample: 56 time_zone: description: - Specifies", "backup. returned: on success type: int sample: 56 month: description:", "paired regions. returned: on success type: str sample: us-phoenix-1 time_created:", "the specified compartment. - For more information about Oracle defined", "[\"id\"] compartment_id: description: - The OCID of the compartment. If", "entering confidential information. returned: on success type: str sample: display_name_example", "responses. 
- \"`hourOfDay` is applicable for periods `ONE_DAY`, `ONE_WEEK`, `ONE_MONTH`", "NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN from __future__", "module.fail_json(msg=\"oci python sdk required for this module.\") resource_facts_helper = ResourceFactsHelper(", "return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), ) def list_resources(self): optional_list_method_params = [", "license. # GNU General Public License v3.0+ (see COPYING or", "success type: str sample: JANUARY retention_seconds: description: - How long,", "of the volume backup policy. returned: on success type: str", "returned: on success type: int sample: 56 month: description: -", "= [] if resource_facts_helper.is_get(): result = [resource_facts_helper.get()] elif resource_facts_helper.is_list(): result", "is specified, the Oracle defined backup policies are listed. type:", "volume backups created by this schedule. returned: on success type:", "{'Department': 'Finance'} }] \"\"\" from ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils", "They will be ignored in the requests for inapplicable periods.", "complex contains: display_name: description: - A user-friendly name. Does not", "ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase, get_custom_class, ) try: from oci.core import", "volume_backup_policy oci_blockstorage_volume_backup_policy_facts: # required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name: List volume_backup_policies", "is just like `NUMERIC_SECONDS`. returned: on success type: str sample:", "# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)", "period: description: - The volume backup frequency. 
returned: on success", "] \"\"\" EXAMPLES = \"\"\" - name: Get a specific", "type: str sample: FULL offset_seconds: description: - The number of", "policy_id=self.module.params.get(\"policy_id\"), ) def list_resources(self): optional_list_method_params = [ \"compartment_id\", \"display_name\", ]", "result = [resource_facts_helper.get()] elif resource_facts_helper.is_list(): result = resource_facts_helper.list() else: resource_facts_helper.fail()", "Apache License v2.0 # See LICENSE.TXT for details. # GENERATED", "description: - The collection of schedules that this policy will", "the volume backup start time should be shifted from the", "on success type: complex contains: display_name: description: - A user-friendly", "`hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month` fields are used and `offsetSeconds`", "python sdk required for this module.\") resource_facts_helper = ResourceFactsHelper( module=module,", "then `offsetSeconds` will be used for both requests and responses", "optional_kwargs = dict( (param, self.module.params[param]) for param in optional_list_method_params if", "changeable. Avoid entering confidential information. returned: on success type: str", "param in optional_list_method_params if self.module.params.get(param) is not None ) return", "type: int sample: 56 month: description: - The month of", "specified compartment. - For more information about Oracle defined backup", "destination region for copying scheduled backups to. Example `us-ashburn-1`. See", "the requests for inapplicable periods. - If value is `NUMERIC_SECONDS`,", "long, in seconds, to keep the volume backups created by", "`ONE_YEAR`.\" - \"`dayOfWeek` is applicable for period `ONE_WEEK`.\" - \"`dayOfMonth`", "keep the volume backups created by this schedule. returned: on", "'US'}} freeform_tags: description: - Free-form tags for this resource. 
Each", "volume_backup_policies: description: - List of VolumeBackupPolicy resources returned: on success", "(@oracle) options: policy_id: description: - The OCID of the volume", "[ \"policy_id\", ] def get_required_params_for_list(self): return [] def get_resource(self): return", "on success type: dict sample: {'Department': 'Finance'} sample: [{ \"display_name\":", "will be ignored in requests and users should ignore its", "The OCID of the volume backup policy. returned: on success", "v2.0 # See LICENSE.TXT for details. # GENERATED FILE -", "OCID of the compartment that contains the volume backup. returned:", "of volume backup to create. returned: on success type: str", "`ONE_DAY`, `ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`.\" - \"`dayOfWeek` is applicable for", "resource. Each tag is a simple key-value pair with no", "GPL 3.0 license or the Apache 2.0 license. # GNU", "offset_seconds: description: - The number of seconds that the volume", "(c) 2020, 2022 Oracle and/or its affiliates. # This software", "policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name: List volume_backup_policies oci_blockstorage_volume_backup_policy_facts: # optional compartment_id:", "VolumeBackupPolicy resources in Oracle Cloud Infrastructure - Lists all the", "For clients using older versions of Apis and not sending", "] def get_required_params_for_list(self): return [] def get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy,", "of the GPL 3.0 license or the Apache 2.0 license.", "sample: [{ \"display_name\": \"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{ \"backup_type\": \"FULL\",", "information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned:", "periods `ONE_MONTH` and `ONE_YEAR`.\" - \"'month' is applicable for period", "backup. 
returned: on success type: str sample: MONDAY day_of_month: description:", "[] def get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), ) def list_resources(self):", "applicable for periods `ONE_DAY`, `ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`.\" - \"`dayOfWeek`", "the volume backup. returned: on success type: str sample: MONDAY", "not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python sdk required for this module.\") resource_facts_helper", "and/or its affiliates. # This software is made available to", "absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { \"metadata_version\":", "type: str sample: JANUARY retention_seconds: description: - How long, in", "its value from the responses. - \"`hourOfDay` is applicable for", "returned: on success type: complex contains: display_name: description: - A", "version_added: \"2.9.0\" author: Oracle (@oracle) options: policy_id: description: - The", "is applicable for periods `ONE_MONTH` and `ONE_YEAR`.\" - \"'month' is", "List volume_backup_policies oci_blockstorage_volume_backup_policy_facts: # optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN =", "\"JANUARY\", \"retention_seconds\": 56, \"time_zone\": \"UTC\" }], \"destination_region\": \"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\",", "day_of_week: description: - The day of the week to schedule", "56 time_zone: description: - Specifies what time zone is the", "on success type: str sample: ONE_HOUR offset_type: description: - Indicates", "Example `us-ashburn-1`. 
See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired regions.", "HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python sdk required for this module.\") resource_facts_helper =", "policies, see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If I(policy_id) is specified, the", "\"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description: - The collection of schedules that this", "True except ImportError: HAS_OCI_PY_SDK = False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations:", "tag is a simple key-value pair with no predefined name,", "Apis and not sending `offsetType` in their requests, the behaviour", "\"ONE_HOUR\", \"offset_type\": \"STRUCTURED\", \"hour_of_day\": 56, \"day_of_week\": \"MONDAY\", \"day_of_month\": 56, \"month\":", "of the day to schedule the volume backup. returned: on", "`offsetSeconds` will be used for both requests and responses and", "like `NUMERIC_SECONDS`. returned: on success type: str sample: STRUCTURED hour_of_day:", "type, or namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). -", "returned. version_added: \"2.9.0\" author: Oracle (@oracle) options: policy_id: description: -", "on success type: str sample: us-phoenix-1 time_created: description: - The", "self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), ) def list_resources(self): optional_list_method_params = [ \"compartment_id\", \"display_name\",", "Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned: on success type:", "The volume backup frequency. 
returned: on success type: str sample:", "= [resource_facts_helper.get()] elif resource_facts_helper.is_list(): result = resource_facts_helper.list() else: resource_facts_helper.fail() module.exit_json(volume_backup_policies=result)", "contains the volume backup. returned: on success type: str sample:", "The hour of the day to schedule the volume backup.", "description: - The paired destination region for copying scheduled backups", "users should ignore its value from the responses. - \"`hourOfDay`", "The type of volume backup to create. returned: on success", "to create. returned: on success type: str sample: FULL offset_seconds:", "import ( OCIResourceFactsHelperBase, get_custom_class, ) try: from oci.core import BlockstorageClient", "information about Oracle defined backup policies and user defined backup", "period. The volume backup start time is the frequency start", "Get a specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts: # required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" -", "is `NUMERIC_SECONDS`, then `offsetSeconds` will be used for both requests", "\"\"\" - name: Get a specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts: # required", "offset_type: description: - Indicates how the offset is defined. If", "oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), ) def list_resources(self): optional_list_method_params = [ \"compartment_id\",", "unique, and it's changeable. Avoid entering confidential information. 
returned: on", "Cloud Infrastructure - Lists all the volume backup policies available", "\"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{ \"backup_type\": \"FULL\", \"offset_seconds\": 56, \"period\":", "type: str aliases: [\"id\"] compartment_id: description: - The OCID of", "- How long, in seconds, to keep the volume backups", "see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If I(policy_id) is specified, the details", "created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). returned: on success type: str", "- The type of volume backup to create. returned: on", "# This software is made available to you under the", "volume backup. returned: on success type: int sample: 56 day_of_week:", "requests, the behaviour is just like `NUMERIC_SECONDS`. returned: on success", "what time zone is the schedule in returned: on success", "specific volume_backup_policy. type: str aliases: [\"id\"] compartment_id: description: - The", "time the volume backup policy was created. Format defined by", "[] if resource_facts_helper.is_get(): result = [resource_facts_helper.get()] elif resource_facts_helper.is_list(): result =", "<reponame>LaudateCorpus1/oci-ansible-collection #!/usr/bin/python # Copyright (c) 2020, 2022 Oracle and/or its", "applicable for period `ONE_WEEK`.\" - \"`dayOfMonth` is applicable for periods", "volume backup to create. returned: on success type: str sample:", "the requests and users should ignore their values from the", "returned: on success type: str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description: -", "backup policy was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). 
returned: on", "}], \"destination_region\": \"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations': {'CostCenter':", "month of the year to schedule the volume backup. returned:", "import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {", "key is predefined and scoped to a namespace. For more", "returned: on success type: str sample: display_name_example id: description: -", "us-phoenix-1 time_created: description: - The date and time the volume", "- DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN", "int sample: 56 time_zone: description: - Specifies what time zone", "= AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python sdk required for", "\"time_zone\": \"UTC\" }], \"destination_region\": \"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\":", "def get_required_params_for_list(self): return [] def get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"),", "'Finance'} sample: [{ \"display_name\": \"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{ \"backup_type\":", "the volume backup policy. - Required to get a specific", "\"2.9.0\" author: Oracle (@oracle) options: policy_id: description: - The OCID", "a specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts: # required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name:", "to keep the volume backups created by this schedule. 
returned:", "try: from oci.core import BlockstorageClient HAS_OCI_PY_SDK = True except ImportError:", "of seconds that the volume backup start time should be", "\"status\": [\"preview\"], \"supported_by\": \"community\", } DOCUMENTATION = \"\"\" --- module:", "short_description: Fetches details about one or multiple VolumeBackupPolicy resources in", "[\"preview\"], \"supported_by\": \"community\", } DOCUMENTATION = \"\"\" --- module: oci_blockstorage_volume_backup_policy_facts", "under the terms of the GPL 3.0 license or the", "If value is `STRUCTURED`, then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month`", "returned: on success type: int sample: 56 time_zone: description: -", "values from the responses. - For clients using older versions", "elif resource_facts_helper.is_list(): result = resource_facts_helper.list() else: resource_facts_helper.fail() module.exit_json(volume_backup_policies=result) if __name__", "UTC destination_region: description: - The paired destination region for copying", "List of VolumeBackupPolicy resources returned: on success type: complex contains:", "specified by the period. The volume backup start time is", "of the month to schedule the volume backup. returned: on", "list_resources(self): optional_list_method_params = [ \"compartment_id\", \"display_name\", ] optional_kwargs = dict(", "Infrastructure - Lists all the volume backup policies available in", "extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ] \"\"\" EXAMPLES = \"\"\" -", "boundaries specified by the period. The volume backup start time", "schedule in returned: on success type: str sample: UTC destination_region:", "type: str sample: \"2013-10-20T19:20:30+01:00\" compartment_id: description: - The OCID of", "None ) return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs ) VolumeBackupPolicyFactsHelperCustom = get_custom_class(", "LICENSE.TXT for details. 
# GENERATED FILE - DO NOT EDIT", "returned: on success type: dict sample: {'Operations': {'CostCenter': 'US'}} freeform_tags:", "# required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name: List volume_backup_policies oci_blockstorage_volume_backup_policy_facts: #", "sample: {'Operations': {'CostCenter': 'US'}} freeform_tags: description: - Free-form tags for", "the period. The volume backup start time is the frequency", "the default interval boundaries specified by the period. The volume", "- Indicates how the offset is defined. If value is", "the volume backup. returned: on success type: int sample: 56", "on success type: complex contains: backup_type: description: - The type", "from the responses. - \"`hourOfDay` is applicable for periods `ONE_DAY`,", "ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\", ) result = [] if", "WILL BE OVERWRITTEN from __future__ import absolute_import, division, print_function __metaclass__", "defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). returned: on success type: str sample: \"2013-10-20T19:20:30+01:00\"", "using older versions of Apis and not sending `offsetType` in", "type: str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description: - Defined tags for", "operations: get, list\"\"\" def get_required_params_for_get(self): return [ \"policy_id\", ] def", "display_name=dict(type=\"str\"), ) ) module = AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci", "if not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python sdk required for this module.\")", "backup. returned: on success type: int sample: 56 day_of_week: description:", "dict( (param, self.module.params[param]) for param in optional_list_method_params if self.module.params.get(param) is", "volume backup policy. 
returned: on success type: str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\"", "be unique, and it's changeable. Avoid entering confidential information. returned:", "with no predefined name, type, or namespace. For more information,", "name, type, or namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).", "] optional_kwargs = dict( (param, self.module.params[param]) for param in optional_list_method_params", "\"\"\" volume_backup_policies: description: - List of VolumeBackupPolicy resources returned: on", "str sample: \"2013-10-20T19:20:30+01:00\" compartment_id: description: - The OCID of the", "responses and the structured fields will be ignored in the", "type: int sample: 56 day_of_week: description: - The day of", "policies available in the specified compartment. - For more information", "\"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN = \"\"\" volume_backup_policies: description: - List of", "and users should ignore its value from the responses. -", "on success type: str sample: MONDAY day_of_month: description: - The", "= \"\"\" - name: Get a specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts: #", "optional_list_method_params = [ \"compartment_id\", \"display_name\", ] optional_kwargs = dict( (param,", "are used and `offsetSeconds` will be ignored in requests and", "sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description: - Defined tags for this resource.", "multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure description: - Fetches", "L(RFC3339,https://tools.ietf.org/html/rfc3339). returned: on success type: str sample: \"2013-10-20T19:20:30+01:00\" compartment_id: description:", "returned: on success type: int sample: 56 day_of_week: description: -", "for details about paired regions. 
returned: on success type: str", "56, \"day_of_week\": \"MONDAY\", \"day_of_month\": 56, \"month\": \"JANUARY\", \"retention_seconds\": 56, \"time_zone\":", "\"schedules\": [{ \"backup_type\": \"FULL\", \"offset_seconds\": 56, \"period\": \"ONE_HOUR\", \"offset_type\": \"STRUCTURED\",", "import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase, get_custom_class, ) try:", "2022 Oracle and/or its affiliates. # This software is made", "sample: STRUCTURED hour_of_day: description: - The hour of the day", "returned: on success type: str sample: ONE_HOUR offset_type: description: -", "success type: str sample: ONE_HOUR offset_type: description: - Indicates how", "`NUMERIC_SECONDS`, then `offsetSeconds` will be used for both requests and", "regions. returned: on success type: str sample: us-phoenix-1 time_created: description:", "pass def main(): module_args = oci_common_utils.get_common_arg_spec() module_args.update( dict( policy_id=dict(aliases=[\"id\"], type=\"str\"),", "MANUAL CHANGES WILL BE OVERWRITTEN from __future__ import absolute_import, division,", "and the structured fields will be ignored in the requests", "\"1.1\", \"status\": [\"preview\"], \"supported_by\": \"community\", } DOCUMENTATION = \"\"\" ---", "schedules: description: - The collection of schedules that this policy", "str sample: ONE_HOUR offset_type: description: - Indicates how the offset", "all the volume backup policies available in the specified compartment.", "= { \"metadata_version\": \"1.1\", \"status\": [\"preview\"], \"supported_by\": \"community\", } DOCUMENTATION", "inapplicable periods. - If value is `NUMERIC_SECONDS`, then `offsetSeconds` will", "by this schedule. returned: on success type: int sample: 56", "of a single VolumeBackupPolicy will be returned. 
version_added: \"2.9.0\" author:", "CHANGES WILL BE OVERWRITTEN from __future__ import absolute_import, division, print_function", "[ oracle.oci.oracle, oracle.oci.oracle_display_name_option ] \"\"\" EXAMPLES = \"\"\" - name:", "2020, 2022 Oracle and/or its affiliates. # This software is", "--- module: oci_blockstorage_volume_backup_policy_facts short_description: Fetches details about one or multiple", "description: - Specifies what time zone is the schedule in", "FULL offset_seconds: description: - The number of seconds that the", "in Oracle Cloud Infrastructure - Lists all the volume backup", "day to schedule the volume backup. returned: on success type:", "to a namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). -", "or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 # See LICENSE.TXT for", "str extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ] \"\"\" EXAMPLES = \"\"\"", "backup policy. returned: on success type: str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules:", "`ONE_MONTH` and `ONE_YEAR`.\" - \"`dayOfWeek` is applicable for period `ONE_WEEK`.\"", "region for copying scheduled backups to. Example `us-ashburn-1`. See L(Region", "ignored in requests and users should ignore its value from", "v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 #", "oci_blockstorage_volume_backup_policy_facts: # required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name: List volume_backup_policies oci_blockstorage_volume_backup_policy_facts:", "oracle.oci.oracle, oracle.oci.oracle_display_name_option ] \"\"\" EXAMPLES = \"\"\" - name: Get", "week to schedule the volume backup. returned: on success type:", "\"supported_by\": \"community\", } DOCUMENTATION = \"\"\" --- module: oci_blockstorage_volume_backup_policy_facts short_description:", "is defined. 
If value is `STRUCTURED`, then `hourOfDay`, `dayOfWeek`, `dayOfMonth`,", "False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations: get, list\"\"\" def get_required_params_for_get(self): return", "The collection of schedules that this policy will apply. returned:", "\"destination_region\": \"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations': {'CostCenter': 'US'}},", "int sample: 56 period: description: - The volume backup frequency.", "policies are listed. type: str extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ]", "volume_backup_policies oci_blockstorage_volume_backup_policy_facts: # optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN = \"\"\"", "more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned:", "the volume backups created by this schedule. returned: on success", "\"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned: on success type: dict sample: {'Department':", "module_args = oci_common_utils.get_common_arg_spec() module_args.update( dict( policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), )", "and `offsetSeconds` will be ignored in requests and users should", "older versions of Apis and not sending `offsetType` in their", "will be used for both requests and responses and the", "simple key-value pair with no predefined name, type, or namespace.", "\"period\": \"ONE_HOUR\", \"offset_type\": \"STRUCTURED\", \"hour_of_day\": 56, \"day_of_week\": \"MONDAY\", \"day_of_month\": 56,", "one or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure -", "this resource. 
Each key is predefined and scoped to a", "aliases: [\"id\"] compartment_id: description: - The OCID of the compartment.", "Lists all the volume backup policies available in the specified", "frequency. returned: on success type: str sample: ONE_HOUR offset_type: description:", "author: Oracle (@oracle) options: policy_id: description: - The OCID of", "\\\\\"42\\\\\"}}`\" returned: on success type: dict sample: {'Operations': {'CostCenter': 'US'}}", "specified, the details of a single VolumeBackupPolicy will be returned.", "str sample: UTC destination_region: description: - The paired destination region", "str sample: FULL offset_seconds: description: - The number of seconds", "of VolumeBackupPolicy resources returned: on success type: complex contains: display_name:", "module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\", ) result = [] if resource_facts_helper.is_get():", "The volume backup start time is the frequency start time", "\"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned: on success type: dict sample:", "description: - The type of volume backup to create. returned:", ") VolumeBackupPolicyFactsHelperCustom = get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" ) class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen", "affiliates. # This software is made available to you under", "or namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example:", "will apply. returned: on success type: complex contains: backup_type: description:", "sample: MONDAY day_of_month: description: - The day of the month", "from the responses. 
- For clients using older versions of", "description: - The month of the year to schedule the", "str sample: STRUCTURED hour_of_day: description: - The hour of the", "sample: {'Department': 'Finance'} sample: [{ \"display_name\": \"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\":", "\"hour_of_day\": 56, \"day_of_week\": \"MONDAY\", \"day_of_month\": 56, \"month\": \"JANUARY\", \"retention_seconds\": 56,", "not sending `offsetType` in their requests, the behaviour is just", "def main(): module_args = oci_common_utils.get_common_arg_spec() module_args.update( dict( policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"),", "interval boundaries specified by the period. The volume backup start", "backup policies are listed. type: str extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option", "int sample: 56 day_of_week: description: - The day of the", "\"2013-10-20T19:20:30+01:00\" compartment_id: description: - The OCID of the compartment that", "except ImportError: HAS_OCI_PY_SDK = False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations: get,", ") module = AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python sdk", "how the offset is defined. If value is `STRUCTURED`, then", "volume_backup_policy. 
type: str aliases: [\"id\"] compartment_id: description: - The OCID", "description: - The date and time the volume backup policy", ") result = [] if resource_facts_helper.is_get(): result = [resource_facts_helper.get()] elif", "\"FULL\", \"offset_seconds\": 56, \"period\": \"ONE_HOUR\", \"offset_type\": \"STRUCTURED\", \"hour_of_day\": 56, \"day_of_week\":", "\"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations': {'CostCenter': 'US'}}, \"freeform_tags\": {'Department':", "and `ONE_YEAR`.\" - \"`dayOfWeek` is applicable for period `ONE_WEEK`.\" -", "- Defined tags for this resource. Each key is predefined", "be returned. version_added: \"2.9.0\" author: Oracle (@oracle) options: policy_id: description:", "on success type: str sample: display_name_example id: description: - The", "from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase, get_custom_class, ) try: from oci.core", "offset. returned: on success type: int sample: 56 period: description:", "- \"'month' is applicable for period 'ONE_YEAR'.\" - They will", "\\\\\"Finance\\\\\"}`\" returned: on success type: dict sample: {'Department': 'Finance'} sample:", "defined. 
If value is `STRUCTURED`, then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and", "= oci_common_utils.get_common_arg_spec() module_args.update( dict( policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), ) )", "\"freeform_tags\": {'Department': 'Finance'} }] \"\"\" from ansible.module_utils.basic import AnsibleModule from", "`ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`.\" - \"`dayOfWeek` is applicable for period", "STRUCTURED hour_of_day: description: - The hour of the day to", "VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ): pass def main(): module_args = oci_common_utils.get_common_arg_spec() module_args.update(", "used and `offsetSeconds` will be ignored in requests and users", "compartment_id: description: - The OCID of the compartment that contains", "description: - The number of seconds that the volume backup", "the frequency start time plus the offset. returned: on success", "of Apis and not sending `offsetType` in their requests, the", "value is `STRUCTURED`, then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month` fields", "is a simple key-value pair with no predefined name, type,", "- \"`dayOfWeek` is applicable for period `ONE_WEEK`.\" - \"`dayOfMonth` is", "type: complex contains: display_name: description: - A user-friendly name. Does", "Each key is predefined and scoped to a namespace. For", "Required to get a specific volume_backup_policy. type: str aliases: [\"id\"]", "How long, in seconds, to keep the volume backups created", "on success type: int sample: 56 period: description: - The", "Indicates how the offset is defined. If value is `STRUCTURED`,", "backup start time is the frequency start time plus the", "description: - List of VolumeBackupPolicy resources returned: on success type:", "its affiliates. 
# This software is made available to you", "\"\"\" RETURN = \"\"\" volume_backup_policies: description: - List of VolumeBackupPolicy", "ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import", "- The month of the year to schedule the volume", "\"offset_type\": \"STRUCTURED\", \"hour_of_day\": 56, \"day_of_week\": \"MONDAY\", \"day_of_month\": 56, \"month\": \"JANUARY\",", "\"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{ \"backup_type\": \"FULL\", \"offset_seconds\": 56, \"period\": \"ONE_HOUR\", \"offset_type\":", "- For more information about Oracle defined backup policies and", "\"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{ \"backup_type\": \"FULL\", \"offset_seconds\": 56, \"period\": \"ONE_HOUR\",", "in returned: on success type: str sample: UTC destination_region: description:", "for details. # GENERATED FILE - DO NOT EDIT -", "schedule. returned: on success type: int sample: 56 time_zone: description:", "schedules that this policy will apply. returned: on success type:", "volume backup frequency. returned: on success type: str sample: ONE_HOUR", "56, \"time_zone\": \"UTC\" }], \"destination_region\": \"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\",", "the offset is defined. If value is `STRUCTURED`, then `hourOfDay`,", "backup_type: description: - The type of volume backup to create.", "} DOCUMENTATION = \"\"\" --- module: oci_blockstorage_volume_backup_policy_facts short_description: Fetches details", "- The collection of schedules that this policy will apply.", "description: - Indicates how the offset is defined. If value", "The paired destination region for copying scheduled backups to. Example", "of the volume backup policy. 
- Required to get a", "no predefined name, type, or namespace. For more information, see", "used for both requests and responses and the structured fields", "dict sample: {'Operations': {'CostCenter': 'US'}} freeform_tags: description: - Free-form tags", "the volume backup policy. returned: on success type: str sample:", "the behaviour is just like `NUMERIC_SECONDS`. returned: on success type:", "get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" ) class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ): pass def", "defined_tags: description: - Defined tags for this resource. Each key", "FILE - DO NOT EDIT - MANUAL CHANGES WILL BE", "success type: str sample: \"2013-10-20T19:20:30+01:00\" compartment_id: description: - The OCID", "type: str sample: ONE_HOUR offset_type: description: - Indicates how the", "of schedules that this policy will apply. returned: on success", "success type: int sample: 56 time_zone: description: - Specifies what", "{ \"metadata_version\": \"1.1\", \"status\": [\"preview\"], \"supported_by\": \"community\", } DOCUMENTATION =", "policy. - Required to get a specific volume_backup_policy. type: str", "zone is the schedule in returned: on success type: str", "Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned: on success type: dict", "returned: on success type: dict sample: {'Department': 'Finance'} sample: [{", "get_required_params_for_get(self): return [ \"policy_id\", ] def get_required_params_for_list(self): return [] def", "optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN = \"\"\" volume_backup_policies: description: -", ") def list_resources(self): optional_list_method_params = [ \"compartment_id\", \"display_name\", ] optional_kwargs", "single VolumeBackupPolicy will be returned. 
version_added: \"2.9.0\" author: Oracle (@oracle)", "is `STRUCTURED`, then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month` fields are", "optional_list_method_params if self.module.params.get(param) is not None ) return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies,", "Does not have to be unique, and it's changeable. Avoid", "sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description: - The collection of schedules that", "'US'}}, \"freeform_tags\": {'Department': 'Finance'} }] \"\"\" from ansible.module_utils.basic import AnsibleModule", "- name: Get a specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts: # required policy_id:", "volume backup start time should be shifted from the default", "be ignored in the requests and users should ignore their", "backups created by this schedule. returned: on success type: int", "description: - Defined tags for this resource. Each key is", "tags for this resource. Each tag is a simple key-value", "required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name: List volume_backup_policies oci_blockstorage_volume_backup_policy_facts: # optional", "If no compartment is specified, the Oracle defined backup policies", "Free-form tags for this resource. Each tag is a simple", "`ONE_YEAR`.\" - \"'month' is applicable for period 'ONE_YEAR'.\" - They", "sample: us-phoenix-1 time_created: description: - The date and time the", "This software is made available to you under the terms", "volume backup policies available in the specified compartment. - For", "description: - The hour of the day to schedule the", "COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 # See LICENSE.TXT", "time_created: description: - The date and time the volume backup", "users should ignore their values from the responses. - For", "confidential information. 
returned: on success type: str sample: display_name_example id:", "sample: FULL offset_seconds: description: - The number of seconds that", "by L(RFC3339,https://tools.ietf.org/html/rfc3339). returned: on success type: str sample: \"2013-10-20T19:20:30+01:00\" compartment_id:", "AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase,", "description: - The OCID of the volume backup policy. -", "# See LICENSE.TXT for details. # GENERATED FILE - DO", "ANSIBLE_METADATA = { \"metadata_version\": \"1.1\", \"status\": [\"preview\"], \"supported_by\": \"community\", }", "- Fetches details about one or multiple VolumeBackupPolicy resources in", "type: str sample: display_name_example id: description: - The OCID of", "oci.core import BlockstorageClient HAS_OCI_PY_SDK = True except ImportError: HAS_OCI_PY_SDK =", "= ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\", ) result = []", "- List of VolumeBackupPolicy resources returned: on success type: complex", "sample: JANUARY retention_seconds: description: - How long, in seconds, to", "VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations: get, list\"\"\" def get_required_params_for_get(self): return [ \"policy_id\",", "sending `offsetType` in their requests, the behaviour is just like", "type: str sample: MONDAY day_of_month: description: - The day of", "import BlockstorageClient HAS_OCI_PY_SDK = True except ImportError: HAS_OCI_PY_SDK = False", "key-value pair with no predefined name, type, or namespace. For", "The day of the week to schedule the volume backup.", "retention_seconds: description: - How long, in seconds, to keep the", "description: - Fetches details about one or multiple VolumeBackupPolicy resources", "OCID of the compartment. 
If no compartment is specified, the", "is made available to you under the terms of the", "collection of schedules that this policy will apply. returned: on", "requests and responses and the structured fields will be ignored", "L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If I(policy_id) is specified, the details of", "oci_common_utils.get_common_arg_spec() module_args.update( dict( policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), ) ) module", "returned: on success type: str sample: UTC destination_region: description: -", "should be shifted from the default interval boundaries specified by", "policy was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). returned: on success", "compartment_id: description: - The OCID of the compartment. If no", "you under the terms of the GPL 3.0 license or", "- The OCID of the volume backup policy. - Required", "\"STRUCTURED\", \"hour_of_day\": 56, \"day_of_week\": \"MONDAY\", \"day_of_month\": 56, \"month\": \"JANUARY\", \"retention_seconds\":", "56, \"month\": \"JANUARY\", \"retention_seconds\": 56, \"time_zone\": \"UTC\" }], \"destination_region\": \"us-phoenix-1\",", "list\"\"\" def get_required_params_for_get(self): return [ \"policy_id\", ] def get_required_params_for_list(self): return", "main(): module_args = oci_common_utils.get_common_arg_spec() module_args.update( dict( policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"),", "type of volume backup to create. returned: on success type:", "the day to schedule the volume backup. returned: on success", "( OCIResourceFactsHelperBase, get_custom_class, ) try: from oci.core import BlockstorageClient HAS_OCI_PY_SDK", "returned: on success type: str sample: us-phoenix-1 time_created: description: -", "backup policy. - Required to get a specific volume_backup_policy. 
type:", "schedule the volume backup. returned: on success type: str sample:", "ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase, get_custom_class, )", "compartment. - For more information about Oracle defined backup policies", "about one or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure", "resources in Oracle Cloud Infrastructure - Lists all the volume", "VolumeBackupPolicy will be returned. version_added: \"2.9.0\" author: Oracle (@oracle) options:", "the Apache 2.0 license. # GNU General Public License v3.0+", "- Lists all the volume backup policies available in the", "str sample: MONDAY day_of_month: description: - The day of the", "= False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations: get, list\"\"\" def get_required_params_for_get(self):", "- MANUAL CHANGES WILL BE OVERWRITTEN from __future__ import absolute_import,", "resource_facts_helper.is_get(): result = [resource_facts_helper.get()] elif resource_facts_helper.is_list(): result = resource_facts_helper.list() else:", "should ignore its value from the responses. - \"`hourOfDay` is", "result = [] if resource_facts_helper.is_get(): result = [resource_facts_helper.get()] elif resource_facts_helper.is_list():", "\"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations': {'CostCenter': 'US'}}, \"freeform_tags\": {'Department': 'Finance'} }] \"\"\"", "description: - The OCID of the volume backup policy. returned:", "in the requests for inapplicable periods. - If value is", "service_client_class=BlockstorageClient, namespace=\"core\", ) result = [] if resource_facts_helper.is_get(): result =", "copying scheduled backups to. Example `us-ashburn-1`. 
See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for", "- The volume backup frequency. returned: on success type: str", "See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired regions. returned: on", "`us-ashburn-1`. See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired regions. returned:", "The OCID of the volume backup policy. - Required to", "resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\", ) result = [] if resource_facts_helper.is_get(): result", "responses. - For clients using older versions of Apis and", "are listed. type: str extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ] \"\"\"", "namespace=\"core\", ) result = [] if resource_facts_helper.is_get(): result = [resource_facts_helper.get()]", "policy will apply. returned: on success type: complex contains: backup_type:", "\"'month' is applicable for period 'ONE_YEAR'.\" - They will be", "MONDAY day_of_month: description: - The day of the month to", "frequency start time plus the offset. returned: on success type:", "multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure - Lists all", "returned: on success type: str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description: -", "success type: int sample: 56 period: description: - The volume", "on success type: str sample: UTC destination_region: description: - The", "for copying scheduled backups to. Example `us-ashburn-1`. See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs)", "hour of the day to schedule the volume backup. 
returned:", "destination_region: description: - The paired destination region for copying scheduled", "The date and time the volume backup policy was created.", "OVERWRITTEN from __future__ import absolute_import, division, print_function __metaclass__ = type", "[{ \"display_name\": \"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{ \"backup_type\": \"FULL\", \"offset_seconds\":", "success type: dict sample: {'Department': 'Finance'} sample: [{ \"display_name\": \"display_name_example\",", "day of the week to schedule the volume backup. returned:", "for this resource. Each tag is a simple key-value pair", "for param in optional_list_method_params if self.module.params.get(param) is not None )", "import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (", "def get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), ) def list_resources(self): optional_list_method_params", "= \"\"\" --- module: oci_blockstorage_volume_backup_policy_facts short_description: Fetches details about one", "volume backup. returned: on success type: str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags:", "to get a specific volume_backup_policy. 
type: str aliases: [\"id\"] compartment_id:", "\"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations': {'CostCenter': 'US'}}, \"freeform_tags\": {'Department': 'Finance'} }]", "- The paired destination region for copying scheduled backups to.", "VolumeBackupPolicyFactsHelperGen ): pass def main(): module_args = oci_common_utils.get_common_arg_spec() module_args.update( dict(", "options: policy_id: description: - The OCID of the volume backup", "description: - The day of the month to schedule the", ") class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ): pass def main(): module_args", "required for this module.\") resource_facts_helper = ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient,", "Copyright (c) 2020, 2022 Oracle and/or its affiliates. # This", "General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache", "volume backup policy. - Required to get a specific volume_backup_policy.", "to be unique, and it's changeable. Avoid entering confidential information.", "\"\"\"Supported operations: get, list\"\"\" def get_required_params_for_get(self): return [ \"policy_id\", ]", "self.module.params.get(param) is not None ) return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs )", "details about paired regions. returned: on success type: str sample:", "\"`dayOfMonth` is applicable for periods `ONE_MONTH` and `ONE_YEAR`.\" - \"'month'", "The day of the month to schedule the volume backup.", "ONE_HOUR offset_type: description: - Indicates how the offset is defined.", "sample: ONE_HOUR offset_type: description: - Indicates how the offset is", "offset is defined. If value is `STRUCTURED`, then `hourOfDay`, `dayOfWeek`,", "in their requests, the behaviour is just like `NUMERIC_SECONDS`. 
returned:", "sample: \"2013-10-20T19:20:30+01:00\" compartment_id: description: - The OCID of the compartment", "details about one or multiple VolumeBackupPolicy resources in Oracle Cloud", "license or the Apache 2.0 license. # GNU General Public", "listed. type: str extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ] \"\"\" EXAMPLES", "plus the offset. returned: on success type: int sample: 56", "AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python sdk required for this", "description: - How long, in seconds, to keep the volume", "start time should be shifted from the default interval boundaries", "Infrastructure description: - Fetches details about one or multiple VolumeBackupPolicy", "for inapplicable periods. - If value is `NUMERIC_SECONDS`, then `offsetSeconds`", "backup. returned: on success type: str sample: JANUARY retention_seconds: description:", "is not None ) return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs ) VolumeBackupPolicyFactsHelperCustom", "ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ): pass def main(): module_args = oci_common_utils.get_common_arg_spec()", "defined backup policies are listed. type: str extends_documentation_fragment: [ oracle.oci.oracle,", "policy. returned: on success type: str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description:", "sample: 56 period: description: - The volume backup frequency. returned:", "freeform_tags: description: - Free-form tags for this resource. Each tag", "`month` fields are used and `offsetSeconds` will be ignored in", "start time plus the offset. returned: on success type: int", "L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired regions. 
returned: on success", "\"\"\" --- module: oci_blockstorage_volume_backup_policy_facts short_description: Fetches details about one or", "scheduled backups to. Example `us-ashburn-1`. See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details", "- For clients using older versions of Apis and not", "OCIResourceFactsHelperBase, get_custom_class, ) try: from oci.core import BlockstorageClient HAS_OCI_PY_SDK =", "and responses and the structured fields will be ignored in", "License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0", "predefined name, type, or namespace. For more information, see L(Resource", "by the period. The volume backup start time is the", "[{ \"backup_type\": \"FULL\", \"offset_seconds\": 56, \"period\": \"ONE_HOUR\", \"offset_type\": \"STRUCTURED\", \"hour_of_day\":", "of the week to schedule the volume backup. returned: on", "returned: on success type: str sample: JANUARY retention_seconds: description: -", "the responses. - \"`hourOfDay` is applicable for periods `ONE_DAY`, `ONE_WEEK`,", "user defined backup policies, see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If I(policy_id)", "to schedule the volume backup. returned: on success type: int", "dict sample: {'Department': 'Finance'} sample: [{ \"display_name\": \"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\",", "for period `ONE_WEEK`.\" - \"`dayOfMonth` is applicable for periods `ONE_MONTH`", "be used for both requests and responses and the structured", "in seconds, to keep the volume backups created by this", "L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
- \"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned: on success", "dict( policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), ) ) module = AnsibleModule(argument_spec=module_args)", "be ignored in requests and users should ignore its value", "the terms of the GPL 3.0 license or the Apache", "- Specifies what time zone is the schedule in returned:", "JANUARY retention_seconds: description: - How long, in seconds, to keep", "{\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned: on success type: dict sample: {'Operations': {'CostCenter':", "made available to you under the terms of the GPL", "value from the responses. - \"`hourOfDay` is applicable for periods", "= resource_facts_helper.list() else: resource_facts_helper.fail() module.exit_json(volume_backup_policies=result) if __name__ == \"__main__\": main()", "name: Get a specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts: # required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\"", "sample: UTC destination_region: description: - The paired destination region for", "was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). returned: on success type:", "resource_facts_helper.is_list(): result = resource_facts_helper.list() else: resource_facts_helper.fail() module.exit_json(volume_backup_policies=result) if __name__ ==", "seconds, to keep the volume backups created by this schedule.", "clients using older versions of Apis and not sending `offsetType`", "defined backup policies and user defined backup policies, see L(Policy-Based", "compartment. 
If no compartment is specified, the Oracle defined backup", "- If value is `NUMERIC_SECONDS`, then `offsetSeconds` will be used", "`dayOfWeek`, `dayOfMonth`, and `month` fields are used and `offsetSeconds` will", "on success type: str sample: JANUARY retention_seconds: description: - How", "display_name_example id: description: - The OCID of the volume backup", "module_args.update( dict( policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), ) ) module =", "- They will be ignored in the requests for inapplicable", "about paired regions. returned: on success type: str sample: us-phoenix-1", "Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If I(policy_id) is specified, the details of a", "no compartment is specified, the Oracle defined backup policies are", "description: - A user-friendly name. Does not have to be", "applicable for period 'ONE_YEAR'.\" - They will be ignored in", "success type: str sample: MONDAY day_of_month: description: - The day", "L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned: on success type:", "\"\"\" from ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from", "and not sending `offsetType` in their requests, the behaviour is", "is applicable for period `ONE_WEEK`.\" - \"`dayOfMonth` is applicable for", "returned: on success type: str sample: MONDAY day_of_month: description: -", "Oracle defined backup policies are listed. type: str extends_documentation_fragment: [", "for period 'ONE_YEAR'.\" - They will be ignored in the", "a namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
- \"Example:", "Fetches details about one or multiple VolumeBackupPolicy resources in Oracle", "\"VolumeBackupPolicyFactsHelperCustom\" ) class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ): pass def main():", "I(policy_id) is specified, the details of a single VolumeBackupPolicy will", "- \"`hourOfDay` is applicable for periods `ONE_DAY`, `ONE_WEEK`, `ONE_MONTH` and", "The OCID of the compartment that contains the volume backup.", "self.module.params[param]) for param in optional_list_method_params if self.module.params.get(param) is not None", "return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs ) VolumeBackupPolicyFactsHelperCustom = get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" )", "month to schedule the volume backup. returned: on success type:", "`{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned: on success type: dict sample: {'Department': 'Finance'}", "the compartment. If no compartment is specified, the Oracle defined", "backup policies, see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If I(policy_id) is specified,", "str sample: JANUARY retention_seconds: description: - How long, in seconds,", "str sample: us-phoenix-1 time_created: description: - The date and time", "and it's changeable. Avoid entering confidential information. returned: on success", "# optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN = \"\"\" volume_backup_policies: description:", "it's changeable. Avoid entering confidential information. returned: on success type:", "description: - The OCID of the compartment. If no compartment", "specified, the Oracle defined backup policies are listed. 
type: str", "`{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned: on success type: dict sample: {'Operations':", "default interval boundaries specified by the period. The volume backup", "to you under the terms of the GPL 3.0 license", "the Oracle defined backup policies are listed. type: str extends_documentation_fragment:", "The month of the year to schedule the volume backup.", "to schedule the volume backup. returned: on success type: str", "is applicable for periods `ONE_DAY`, `ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`.\" -", "\"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description: - Defined tags for this resource. Each", "module.\") resource_facts_helper = ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\", ) result", "Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired regions. returned: on success type:", "- A user-friendly name. Does not have to be unique,", "success type: int sample: 56 month: description: - The month", "time_zone: description: - Specifies what time zone is the schedule", "OCID of the volume backup policy. returned: on success type:", "A user-friendly name. Does not have to be unique, and", "compartment is specified, the Oracle defined backup policies are listed.", "on success type: str sample: STRUCTURED hour_of_day: description: - The", "day_of_month: description: - The day of the month to schedule", "month: description: - The month of the year to schedule", "__future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA =", "the details of a single VolumeBackupPolicy will be returned. 
version_added:", "fields will be ignored in the requests and users should", "success type: str sample: display_name_example id: description: - The OCID", "description: - The OCID of the compartment that contains the", "backup. returned: on success type: str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description:", "predefined and scoped to a namespace. For more information, see", "VolumeBackupPolicy resources returned: on success type: complex contains: display_name: description:", "- \"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned: on success type: dict", "in the requests and users should ignore their values from", "- The OCID of the volume backup policy. returned: on", "\"offset_seconds\": 56, \"period\": \"ONE_HOUR\", \"offset_type\": \"STRUCTURED\", \"hour_of_day\": 56, \"day_of_week\": \"MONDAY\",", "the year to schedule the volume backup. returned: on success", "type: dict sample: {'Department': 'Finance'} sample: [{ \"display_name\": \"display_name_example\", \"id\":", "- The number of seconds that the volume backup start", "get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), ) def list_resources(self): optional_list_method_params =", "self.client.list_volume_backup_policies, **optional_kwargs ) VolumeBackupPolicyFactsHelperCustom = get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" ) class ResourceFactsHelper(", "}] \"\"\" from ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils", "namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
- \"Example: `{\\\\\"Operations\\\\\":", "if self.module.params.get(param) is not None ) return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs", "pair with no predefined name, type, or namespace. For more", "for this module.\") resource_facts_helper = ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\",", "and user defined backup policies, see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If", "`offsetSeconds` will be ignored in requests and users should ignore", "If value is `NUMERIC_SECONDS`, then `offsetSeconds` will be used for", "- The OCID of the compartment. If no compartment is", "): pass def main(): module_args = oci_common_utils.get_common_arg_spec() module_args.update( dict( policy_id=dict(aliases=[\"id\"],", "volume backup. returned: on success type: int sample: 56 month:", "the schedule in returned: on success type: str sample: UTC", "If I(policy_id) is specified, the details of a single VolumeBackupPolicy", "both requests and responses and the structured fields will be", "returned: on success type: str sample: STRUCTURED hour_of_day: description: -", "that the volume backup start time should be shifted from", "str aliases: [\"id\"] compartment_id: description: - The OCID of the", "`ONE_WEEK`.\" - \"`dayOfMonth` is applicable for periods `ONE_MONTH` and `ONE_YEAR`.\"", "time zone is the schedule in returned: on success type:", "that this policy will apply. returned: on success type: complex", "backup policies available in the specified compartment. - For more", "on success type: int sample: 56 month: description: - The", "3.0 license or the Apache 2.0 license. # GNU General", "division, print_function __metaclass__ = type ANSIBLE_METADATA = { \"metadata_version\": \"1.1\",", "their requests, the behaviour is just like `NUMERIC_SECONDS`. returned: on", "apply. 
returned: on success type: complex contains: backup_type: description: -", "create. returned: on success type: str sample: FULL offset_seconds: description:", "(see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 # See", "`ONE_MONTH` and `ONE_YEAR`.\" - \"'month' is applicable for period 'ONE_YEAR'.\"", "sample: 56 day_of_week: description: - The day of the week", "and scoped to a namespace. For more information, see L(Resource", "resources returned: on success type: complex contains: display_name: description: -", "get_custom_class, ) try: from oci.core import BlockstorageClient HAS_OCI_PY_SDK = True", "\"UTC\" }], \"destination_region\": \"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations':", "sample: 56 month: description: - The month of the year", "- The hour of the day to schedule the volume", "= get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" ) class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ): pass", "= dict( (param, self.module.params[param]) for param in optional_list_method_params if self.module.params.get(param)", "of the compartment. If no compartment is specified, the Oracle", "oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase, get_custom_class, ) try: from", "def get_required_params_for_get(self): return [ \"policy_id\", ] def get_required_params_for_list(self): return []", "Defined tags for this resource. Each key is predefined and", "get, list\"\"\" def get_required_params_for_get(self): return [ \"policy_id\", ] def get_required_params_for_list(self):", "for this resource. 
Each key is predefined and scoped to", "success type: dict sample: {'Operations': {'CostCenter': 'US'}} freeform_tags: description: -", "compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN = \"\"\" volume_backup_policies: description: - List", "HAS_OCI_PY_SDK = False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations: get, list\"\"\" def", "contains: backup_type: description: - The type of volume backup to", "shifted from the default interval boundaries specified by the period.", "= \"\"\" volume_backup_policies: description: - List of VolumeBackupPolicy resources returned:", "oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs ) VolumeBackupPolicyFactsHelperCustom = get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" ) class", "the GPL 3.0 license or the Apache 2.0 license. #", "`dayOfMonth`, and `month` fields are used and `offsetSeconds` will be", "= type ANSIBLE_METADATA = { \"metadata_version\": \"1.1\", \"status\": [\"preview\"], \"supported_by\":", "more information about Oracle defined backup policies and user defined", "compartment that contains the volume backup. returned: on success type:", ") return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs ) VolumeBackupPolicyFactsHelperCustom = get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\"", "should ignore their values from the responses. - For clients", "{'Operations': {'CostCenter': 'US'}}, \"freeform_tags\": {'Department': 'Finance'} }] \"\"\" from ansible.module_utils.basic", "def list_resources(self): optional_list_method_params = [ \"compartment_id\", \"display_name\", ] optional_kwargs =", "Oracle (@oracle) options: policy_id: description: - The OCID of the", "be ignored in the requests for inapplicable periods. 
- If", "https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 # See LICENSE.TXT for details.", "{'CostCenter': 'US'}}, \"freeform_tags\": {'Department': 'Finance'} }] \"\"\" from ansible.module_utils.basic import", "from ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils", "\"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name: List volume_backup_policies oci_blockstorage_volume_backup_policy_facts: # optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\"", "BE OVERWRITTEN from __future__ import absolute_import, division, print_function __metaclass__ =", "oci_blockstorage_volume_backup_policy_facts short_description: Fetches details about one or multiple VolumeBackupPolicy resources", "ImportError: HAS_OCI_PY_SDK = False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations: get, list\"\"\"", "defined backup policies, see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). - If I(policy_id) is", "the responses. - For clients using older versions of Apis", "- The day of the month to schedule the volume", "returned: on success type: str sample: FULL offset_seconds: description: -", "type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), ) ) module = AnsibleModule(argument_spec=module_args) if not", "RETURN = \"\"\" volume_backup_policies: description: - List of VolumeBackupPolicy resources", "returned: on success type: complex contains: backup_type: description: - The", "The OCID of the compartment. If no compartment is specified,", "hour_of_day: description: - The hour of the day to schedule", "of the compartment that contains the volume backup. 
returned: on", "Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License", "{'CostCenter': 'US'}} freeform_tags: description: - Free-form tags for this resource.", "success type: str sample: us-phoenix-1 time_created: description: - The date", "type: dict sample: {'Operations': {'CostCenter': 'US'}} freeform_tags: description: - Free-form", "in optional_list_method_params if self.module.params.get(param) is not None ) return oci_common_utils.list_all_resources(", "- The date and time the volume backup policy was", "the week to schedule the volume backup. returned: on success", "HAS_OCI_PY_SDK = True except ImportError: HAS_OCI_PY_SDK = False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase):", "result = resource_facts_helper.list() else: resource_facts_helper.fail() module.exit_json(volume_backup_policies=result) if __name__ == \"__main__\":", "tags for this resource. Each key is predefined and scoped", "Oracle Cloud Infrastructure description: - Fetches details about one or", "\"day_of_week\": \"MONDAY\", \"day_of_month\": 56, \"month\": \"JANUARY\", \"retention_seconds\": 56, \"time_zone\": \"UTC\"", "on success type: int sample: 56 day_of_week: description: - The", "contains: display_name: description: - A user-friendly name. Does not have", "on success type: str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description: - The", "Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). returned: on success type: str sample:", "description: - The volume backup frequency. returned: on success type:", "volume backup start time is the frequency start time plus", "type: str extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_display_name_option ] \"\"\" EXAMPLES =", "success type: str sample: FULL offset_seconds: description: - The number", "see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
- \"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\" returned: on", "get_required_params_for_list(self): return [] def get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), )", "type ANSIBLE_METADATA = { \"metadata_version\": \"1.1\", \"status\": [\"preview\"], \"supported_by\": \"community\",", "returned: on success type: str sample: \"2013-10-20T19:20:30+01:00\" compartment_id: description: -", "Oracle defined backup policies and user defined backup policies, see", "ignored in the requests and users should ignore their values", "seconds that the volume backup start time should be shifted", "their values from the responses. - For clients using older", "on success type: str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description: - Defined", "is applicable for period 'ONE_YEAR'.\" - They will be ignored", "versions of Apis and not sending `offsetType` in their requests,", "GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL", "EDIT - MANUAL CHANGES WILL BE OVERWRITTEN from __future__ import", "int sample: 56 month: description: - The month of the", "applicable for periods `ONE_MONTH` and `ONE_YEAR`.\" - \"'month' is applicable", "be shifted from the default interval boundaries specified by the", "that contains the volume backup. returned: on success type: str", "day of the month to schedule the volume backup. returned:", "then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month` fields are used and", "for periods `ONE_DAY`, `ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`.\" - \"`dayOfWeek` is", "OCID of the volume backup policy. - Required to get", "a single VolumeBackupPolicy will be returned. 
version_added: \"2.9.0\" author: Oracle", "or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure description: -", "GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) #", "str sample: display_name_example id: description: - The OCID of the", "{'Department': 'Finance'} sample: [{ \"display_name\": \"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{", "volume backup policy was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339). returned:", "2.0 license. # GNU General Public License v3.0+ (see COPYING", "License v2.0 # See LICENSE.TXT for details. # GENERATED FILE", "not have to be unique, and it's changeable. Avoid entering", "- The OCID of the compartment that contains the volume", "volume backup. returned: on success type: str sample: JANUARY retention_seconds:", "the offset. returned: on success type: int sample: 56 period:", "\"MONDAY\", \"day_of_month\": 56, \"month\": \"JANUARY\", \"retention_seconds\": 56, \"time_zone\": \"UTC\" }],", "The number of seconds that the volume backup start time", "is predefined and scoped to a namespace. For more information,", "behaviour is just like `NUMERIC_SECONDS`. returned: on success type: str", "\"backup_type\": \"FULL\", \"offset_seconds\": 56, \"period\": \"ONE_HOUR\", \"offset_type\": \"STRUCTURED\", \"hour_of_day\": 56,", "type: int sample: 56 period: description: - The volume backup", "specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts: # required policy_id: \"ocid1.policy.oc1..xxxxxxEXAMPLExxxxxx\" - name: List", "description: - The day of the week to schedule the", "in the specified compartment. - For more information about Oracle", "available to you under the terms of the GPL 3.0", "periods. 
- If value is `NUMERIC_SECONDS`, then `offsetSeconds` will be", "str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description: - Defined tags for this", "sample: 56 time_zone: description: - Specifies what time zone is", "'Finance'} }] \"\"\" from ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import", "DOCUMENTATION = \"\"\" --- module: oci_blockstorage_volume_backup_policy_facts short_description: Fetches details about", "the volume backup policy was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).", "the volume backup. returned: on success type: str sample: JANUARY", "For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\":", "Oracle Cloud Infrastructure - Lists all the volume backup policies", "- If I(policy_id) is specified, the details of a single", "period `ONE_WEEK`.\" - \"`dayOfMonth` is applicable for periods `ONE_MONTH` and", "**optional_kwargs ) VolumeBackupPolicyFactsHelperCustom = get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" ) class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom,", "from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA", "this schedule. 
returned: on success type: int sample: 56 time_zone:", "- \"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned: on success type: dict sample:", "\"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations': {'CostCenter': 'US'}}, \"freeform_tags\": {'Department': 'Finance'}", "success type: str sample: STRUCTURED hour_of_day: description: - The hour", "success type: complex contains: backup_type: description: - The type of", "compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), ) ) module = AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK:", "backup policies and user defined backup policies, see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm).", "will be ignored in the requests and users should ignore", "\"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\": \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\", \"defined_tags\": {'Operations': {'CostCenter': 'US'}}, \"freeform_tags\":", "return [] def get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_volume_backup_policy, policy_id=self.module.params.get(\"policy_id\"), ) def", "\"compartment_id\", \"display_name\", ] optional_kwargs = dict( (param, self.module.params[param]) for param", "#!/usr/bin/python # Copyright (c) 2020, 2022 Oracle and/or its affiliates.", "print_function __metaclass__ = type ANSIBLE_METADATA = { \"metadata_version\": \"1.1\", \"status\":", "on success type: str sample: FULL offset_seconds: description: - The", "- \"`dayOfMonth` is applicable for periods `ONE_MONTH` and `ONE_YEAR`.\" -", "details of a single VolumeBackupPolicy will be returned. version_added: \"2.9.0\"", "56 period: description: - The volume backup frequency. 
returned: on", "returned: on success type: int sample: 56 period: description: -", "this module.\") resource_facts_helper = ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\", )", "structured fields will be ignored in the requests and users", "of the year to schedule the volume backup. returned: on", "is specified, the details of a single VolumeBackupPolicy will be", "oci_blockstorage_volume_backup_policy_facts: # optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN = \"\"\" volume_backup_policies:", "\"metadata_version\": \"1.1\", \"status\": [\"preview\"], \"supported_by\": \"community\", } DOCUMENTATION = \"\"\"", "56 day_of_week: description: - The day of the week to", "VolumeBackupPolicyFactsHelperCustom = get_custom_class( \"VolumeBackupPolicyFactsHelperCustom\" ) class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ):", "success type: str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" defined_tags: description: - Defined tags", "Each tag is a simple key-value pair with no predefined", "date and time the volume backup policy was created. Format", ") ) module = AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python", "# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES", "periods `ONE_DAY`, `ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`.\" - \"`dayOfWeek` is applicable", "get a specific volume_backup_policy. 
type: str aliases: [\"id\"] compartment_id: description:", "policy_id=dict(aliases=[\"id\"], type=\"str\"), compartment_id=dict(type=\"str\"), display_name=dict(type=\"str\"), ) ) module = AnsibleModule(argument_spec=module_args) if", "type: int sample: 56 time_zone: description: - Specifies what time", "- name: List volume_backup_policies oci_blockstorage_volume_backup_policy_facts: # optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\"", "class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported operations: get, list\"\"\" def get_required_params_for_get(self): return [", "if resource_facts_helper.is_get(): result = [resource_facts_helper.get()] elif resource_facts_helper.is_list(): result = resource_facts_helper.list()", "success type: str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description: - The collection", "\"display_name\", ] optional_kwargs = dict( (param, self.module.params[param]) for param in", "module = AnsibleModule(argument_spec=module_args) if not HAS_OCI_PY_SDK: module.fail_json(msg=\"oci python sdk required", "have to be unique, and it's changeable. Avoid entering confidential", "= [ \"compartment_id\", \"display_name\", ] optional_kwargs = dict( (param, self.module.params[param])", "\"`hourOfDay` is applicable for periods `ONE_DAY`, `ONE_WEEK`, `ONE_MONTH` and `ONE_YEAR`.\"", "(param, self.module.params[param]) for param in optional_list_method_params if self.module.params.get(param) is not", "sample: display_name_example id: description: - The OCID of the volume", "terms of the GPL 3.0 license or the Apache 2.0", "ignore its value from the responses. - \"`hourOfDay` is applicable", "and users should ignore their values from the responses. -", "# Apache License v2.0 # See LICENSE.TXT for details. #", "For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
- \"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\"", "\"`dayOfWeek` is applicable for period `ONE_WEEK`.\" - \"`dayOfMonth` is applicable", "[ \"compartment_id\", \"display_name\", ] optional_kwargs = dict( (param, self.module.params[param]) for", "policies and user defined backup policies, see L(Policy-Based Backups,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm). -", "on success type: dict sample: {'Operations': {'CostCenter': 'US'}} freeform_tags: description:", "just like `NUMERIC_SECONDS`. returned: on success type: str sample: STRUCTURED", "available in the specified compartment. - For more information about", "about Oracle defined backup policies and user defined backup policies,", "EXAMPLES = \"\"\" - name: Get a specific volume_backup_policy oci_blockstorage_volume_backup_policy_facts:", "`STRUCTURED`, then `hourOfDay`, `dayOfWeek`, `dayOfMonth`, and `month` fields are used", "created by this schedule. returned: on success type: int sample:", "description: - Free-form tags for this resource. Each tag is", "module: oci_blockstorage_volume_backup_policy_facts short_description: Fetches details about one or multiple VolumeBackupPolicy", "= True except ImportError: HAS_OCI_PY_SDK = False class VolumeBackupPolicyFactsHelperGen(OCIResourceFactsHelperBase): \"\"\"Supported", "time should be shifted from the default interval boundaries specified", "56 month: description: - The month of the year to", "\"day_of_month\": 56, \"month\": \"JANUARY\", \"retention_seconds\": 56, \"time_zone\": \"UTC\" }], \"destination_region\":", "resource_facts_helper = ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\", service_client_class=BlockstorageClient, namespace=\"core\", ) result =", "scoped to a namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).", "# Copyright (c) 2020, 2022 Oracle and/or its affiliates. 
#", "DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN from", "resources in Oracle Cloud Infrastructure description: - Fetches details about", "fields are used and `offsetSeconds` will be ignored in requests", "complex contains: backup_type: description: - The type of volume backup", "\"retention_seconds\": 56, \"time_zone\": \"UTC\" }], \"destination_region\": \"us-phoenix-1\", \"time_created\": \"2013-10-20T19:20:30+01:00\", \"compartment_id\":", "user-friendly name. Does not have to be unique, and it's", "value is `NUMERIC_SECONDS`, then `offsetSeconds` will be used for both", "ignore their values from the responses. - For clients using", "requests for inapplicable periods. - If value is `NUMERIC_SECONDS`, then", "from the default interval boundaries specified by the period. The", "type: str sample: STRUCTURED hour_of_day: description: - The hour of", "from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIResourceFactsHelperBase, get_custom_class,", "and time the volume backup policy was created. Format defined", "period 'ONE_YEAR'.\" - They will be ignored in the requests", "Apache 2.0 license. # GNU General Public License v3.0+ (see", "and `month` fields are used and `offsetSeconds` will be ignored", "for both requests and responses and the structured fields will", ") try: from oci.core import BlockstorageClient HAS_OCI_PY_SDK = True except", "the volume backup. returned: on success type: str sample: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\"", "str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description: - The collection of schedules", "- The day of the week to schedule the volume", "class ResourceFactsHelper( VolumeBackupPolicyFactsHelperCustom, VolumeBackupPolicyFactsHelperGen ): pass def main(): module_args =", "paired destination region for copying scheduled backups to. Example `us-ashburn-1`.", "information. 
returned: on success type: str sample: display_name_example id: description:", "not None ) return oci_common_utils.list_all_resources( self.client.list_volume_backup_policies, **optional_kwargs ) VolumeBackupPolicyFactsHelperCustom =", "the month to schedule the volume backup. returned: on success", "is the frequency start time plus the offset. returned: on", "success type: complex contains: display_name: description: - A user-friendly name.", "and `ONE_YEAR`.\" - \"'month' is applicable for period 'ONE_YEAR'.\" -", "\"policy_id\", ] def get_required_params_for_list(self): return [] def get_resource(self): return oci_common_utils.call_with_backoff(", "See LICENSE.TXT for details. # GENERATED FILE - DO NOT", "schedule the volume backup. returned: on success type: int sample:", "see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned: on success", "time plus the offset. returned: on success type: int sample:", "software is made available to you under the terms of", "Specifies what time zone is the schedule in returned: on", "information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Department\\\\\": \\\\\"Finance\\\\\"}`\" returned: on", "time is the frequency start time plus the offset. returned:", "name: List volume_backup_policies oci_blockstorage_volume_backup_policy_facts: # optional compartment_id: \"ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx\" \"\"\" RETURN", "more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Operations\\\\\": {\\\\\"CostCenter\\\\\": \\\\\"42\\\\\"}}`\"", "56, \"period\": \"ONE_HOUR\", \"offset_type\": \"STRUCTURED\", \"hour_of_day\": 56, \"day_of_week\": \"MONDAY\", \"day_of_month\":", "this resource. 
Each tag is a simple key-value pair with", "Oracle and/or its affiliates. # This software is made available", "type: str sample: us-phoenix-1 time_created: description: - The date and", "ignored in the requests for inapplicable periods. - If value", "requests and users should ignore their values from the responses.", "success type: str sample: UTC destination_region: description: - The paired", "VolumeBackupPolicy resources in Oracle Cloud Infrastructure description: - Fetches details", "success type: int sample: 56 day_of_week: description: - The day", "BlockstorageClient HAS_OCI_PY_SDK = True except ImportError: HAS_OCI_PY_SDK = False class", "sdk required for this module.\") resource_facts_helper = ResourceFactsHelper( module=module, resource_type=\"volume_backup_policy\",", "will be returned. version_added: \"2.9.0\" author: Oracle (@oracle) options: policy_id:", "for periods `ONE_MONTH` and `ONE_YEAR`.\" - \"'month' is applicable for", "\"display_name\": \"display_name_example\", \"id\": \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\", \"schedules\": [{ \"backup_type\": \"FULL\", \"offset_seconds\": 56,", "\"month\": \"JANUARY\", \"retention_seconds\": 56, \"time_zone\": \"UTC\" }], \"destination_region\": \"us-phoenix-1\", \"time_created\":", "type: complex contains: backup_type: description: - The type of volume", "return [ \"policy_id\", ] def get_required_params_for_list(self): return [] def get_resource(self):", "\"defined_tags\": {'Operations': {'CostCenter': 'US'}}, \"freeform_tags\": {'Department': 'Finance'} }] \"\"\" from", "start time is the frequency start time plus the offset.", "Cloud Infrastructure description: - Fetches details about one or multiple", "is the schedule in returned: on success type: str sample:", "to. Example `us-ashburn-1`. See L(Region Pairs,https://docs.cloud.oracle.com/iaas/Content/Block/Tasks/schedulingvolumebackups.htm#RegionPairs) for details about paired", "display_name: description: - A user-friendly name. 
Does not have to", "- Required to get a specific volume_backup_policy. type: str aliases:", "in Oracle Cloud Infrastructure description: - Fetches details about one", "id: description: - The OCID of the volume backup policy.", "the compartment that contains the volume backup. returned: on success", "details. # GENERATED FILE - DO NOT EDIT - MANUAL", "type: str sample: UTC destination_region: description: - The paired destination", "- Free-form tags for this resource. Each tag is a", "this policy will apply. returned: on success type: complex contains:", "or the Apache 2.0 license. # GNU General Public License", "\"community\", } DOCUMENTATION = \"\"\" --- module: oci_blockstorage_volume_backup_policy_facts short_description: Fetches", "Avoid entering confidential information. returned: on success type: str sample:", "`offsetType` in their requests, the behaviour is just like `NUMERIC_SECONDS`.", "or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure - Lists", "a simple key-value pair with no predefined name, type, or", "from oci.core import BlockstorageClient HAS_OCI_PY_SDK = True except ImportError: HAS_OCI_PY_SDK", "__metaclass__ = type ANSIBLE_METADATA = { \"metadata_version\": \"1.1\", \"status\": [\"preview\"],", "\"\"\" EXAMPLES = \"\"\" - name: Get a specific volume_backup_policy", "namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). - \"Example: `{\\\\\"Department\\\\\":", "[resource_facts_helper.get()] elif resource_facts_helper.is_list(): result = resource_facts_helper.list() else: resource_facts_helper.fail() module.exit_json(volume_backup_policies=result) if", "`NUMERIC_SECONDS`. returned: on success type: str sample: STRUCTURED hour_of_day: description:", "a specific volume_backup_policy. 
type: str aliases: [\"id\"] compartment_id: description: -", "one or multiple VolumeBackupPolicy resources in Oracle Cloud Infrastructure description:", "type: str sample: \"ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx\" schedules: description: - The collection of", "{'Operations': {'CostCenter': 'US'}} freeform_tags: description: - Free-form tags for this", "year to schedule the volume backup. returned: on success type:", "policy_id: description: - The OCID of the volume backup policy.", "requests and users should ignore its value from the responses.", "volume backup. returned: on success type: str sample: MONDAY day_of_month:" ]
[ "<reponame>mhozza/pi-control<gh_stars>0 from django.apps import AppConfig class ServerStatsConfig(AppConfig): name = \"server_stats\"" ]
[ "# Every cache key will get prefixed with this value", "item in the sequence is a four item sequence. #", "data has been created. This is where any custom order", "# production. Best set to ``True`` in local_settings.py DEBUG =", "_ SECRET_KEY = <KEY>' ###################### # CARTRIDGE SETTINGS # ######################", "\"django.template.loaders.app_directories.Loader\", ], }, }, ] if DJANGO_VERSION < (1, 9):", "and payment steps. # SHOP_CHECKOUT_STEPS_SPLIT = True # If True,", "implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and grouping of", "\"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\", # \"mezzanine.accounts\", 'corsheaders', 'rest_framework',", "to the same as your # system time zone. TIME_ZONE", "If running in a Windows environment this must be set", "created. This is where any custom order # processing should", "'UTC' # If you set this to True, Django will", "( # (\"Content\", (\"pages.Page\", \"blog.BlogPost\", # \"generic.ThreadedComment\", (_(\"Media Library\"), \"media-library\"),)),", "for selection when editing which # menus a page should", "= \"en_GB.UTF-8\" # Dotted package path and name of the", "sequence of fields that will be injected into Mezzanine's (or", "http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and grouping of the admin", "from __future__ import absolute_import, unicode_literals import os from django import", "# Setting to turn on featured images for shop categories.", "the directory static files should be collected to. # Don't", "path to field. # \"mezzanine.blog.models.BlogPost.image\", # # Dotted path to", "applicable. We also allow this settings module to be imported", "to. # Don't put anything in this directory yourself; store", "as your # system time zone. 
TIME_ZONE = 'UTC' #", "# of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the", "os.path.exists(f): import sys import imp module_name = \"%s.local_settings\" % PROJECT_APP", "completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION = True # Controls the formatting of", "( \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\",", "localisation # 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\",", "\"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\", #", "images for shop categories. Defaults to False. # SHOP_CATEGORY_USE_FEATURED_IMAGE =", "root urlpatterns from for the project. ROOT_URLCONF = \"%s.urls\" %", "using custom forks of them. 
PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI =", "'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\",", "list # of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls", "(\"Content\", (\"pages.Page\", \"blog.BlogPost\", # \"generic.ThreadedComment\", (_(\"Media Library\"), \"media-library\"),)), # (_(\"Shop\"),", "the SHOP_OPTION_TYPE_CHOICES setting that # control how the options should", "(\"Site\", (\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")), # (\"Users\", (\"auth.User\", \"auth.Group\",)), # )", "is where # integration with a payment gateway should be", "middleware classes will be applied in the order given, and", "called once an order is successful and all of the", "a final confirmation step before # completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION =", "NOTE: Increasing the number of these will # require database", "= 1 # If you set this to False, Django", "selection when editing which # menus a page should appear", "\"templates\") ], \"OPTIONS\": { \"context_processors\": [ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\",", "production. 
Best set to ``True`` in local_settings.py DEBUG = True", "f sys.modules[module_name] = module exec(open(f, \"rb\").read()) #################### # DYNAMIC SETTINGS", "directory the project is in to try and use something", "# system time zone. TIME_ZONE = 'UTC' # If you", "of value/name pairs for types of product options, # eg", "path to the directory that will hold user-uploaded files. #", "# A three item sequence, each containing a sequence of", "PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL SETTINGS # ################## #", "its field # name to be added, and the dotted", "False. # SHOP_CATEGORY_USE_FEATURED_IMAGE = True # If True, the checkout", "################## # LOCAL SETTINGS # ################## # Allow any settings", "# without Mezzanine installed, as the case may be when", "the project. ROOT_URLCONF = \"%s.urls\" % PROJECT_APP TEMPLATES = [", "= 'UTC' # If you set this to True, Django", "\"%s.urls\" % PROJECT_APP TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\":", "# Allow any settings to be defined in local_settings.py which", "# SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\" # Sequence of value/name pairs for", "value/name pairs for types of product options, # eg Size,", "\"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\",", "# 'rest_framework.authtoken', 'mezzanine_cartridge_api', ) # List of middleware classes to", "DATABASES # ############# DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3',", "page should appear in. 
Note that if a menu template", "django import VERSION as DJANGO_VERSION from django.utils.translation import ugettext_lazy as", "# OPTIONAL APPLICATIONS # ######################### # These will be added", "If True, the django-modeltranslation will be added to the #", "set to the same as your # system time zone.", "os.path.dirname(PROJECT_APP_PATH) # Every cache key will get prefixed with this", "\"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\",", "= os.path.dirname(PROJECT_APP_PATH) # Every cache key will get prefixed with", "the ``page_menu`` template tag. Each # item in the sequence", "for conveniently overriding. Please consult the settings # documentation for", "the request phase, # these middleware classes will be applied", "required if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS =", "of available credit card types for payment. # SHOP_CARD_TYPES =", "for blog posts. Defaults to False. # # BLOG_USE_FEATURED_IMAGE =", "LOCAL SETTINGS # ################## # Allow any settings to be", "\"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ], }, }, ] if DJANGO_VERSION <", "value - here we set it to # the name", "django-modeltranslation will be added to the # INSTALLED_APPS setting. 
USE_MODELTRANSLATION", "the case may be when using the # fabfile, where", "\"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\",", "use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL", "SHOP_CHECKOUT_STEPS_SPLIT = True # If True, the checkout process has", "\"mezzanine.galleries\", \"mezzanine.twitter\", # \"mezzanine.accounts\", 'corsheaders', 'rest_framework', 'rest_framework_api_key', 'drf_yasg', # 'oauth2_provider',", "(\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",), # ) # A", "class. # (_(\"Image\"),), # # Keyword args for field class.", "# ), # ) # Setting to turn on featured", "\"cartridge.shop.checkout.default_order_handler\" # Dotted package path and name of the function", "consult the settings documentation for a full list # of", "called on submit of the payment checkout step. This is", "Absolute filesystem path to the directory that will hold user-uploaded", "\"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ], \"builtins\": [ \"mezzanine.template.loader_tags\", ], \"loaders\": [", "###################### # MEZZANINE SETTINGS # ###################### # The following settings", "Mezzanine's apps, but are # common enough to be put", "order statuses. # SHOP_ORDER_STATUS_CHOICES = ( # (1, \"Unprocessed\"), #", "system. 
# If running in a Windows environment this must", "items are a sequence of positional # args and a", "< (1, 9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ # APPLICATIONS # ################", "# PAGE_MENU_TEMPLATES = ( # (1, _(\"Top navigation bar\"), \"pages/menus/dropdown.html\"),", "split into separate # billing/shipping and payment steps. # SHOP_CHECKOUT_STEPS_SPLIT", "\"OPTIONS\": { \"context_processors\": [ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\",", "dashboard. # # DASHBOARD_TAGS = ( # (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), #", "a full list # of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings", "to Django's autoreload. f = os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if os.path.exists(f): import", "'db.dev', } } ######### # PATHS # ######### # Full", "local_settings has full access to everything defined in this module.", "# to load the internationalization machinery. USE_I18N = False AUTHENTICATION_BACKENDS", "SECRET_KEY = <KEY>' ###################### # CARTRIDGE SETTINGS # ###################### #", "setting that # control how the options should be ordered", "are # common enough to be put here, commented out,", "for error pages. 
Should always be set to ``False`` in", "ordered in the admin, # eg for \"Colour\" then \"Size\"", "DASHBOARD_TAGS = ( # (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",),", "# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*'] # Local time zone", "Example of adding a field to *all* of Mezzanine's content", "If True, the checkout process is split into separate #", "be put here, commented # out, for conveniently overriding. Please", "# (2, \"Colour\"), # ) # Sequence of indexes from", "Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module name to", "\"shop.Order\")), # (\"Site\", (\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")), # (\"Users\", (\"auth.User\", \"auth.Group\",)),", "# billing/shipping and payment steps. # SHOP_CHECKOUT_STEPS_SPLIT = True #", "middleware will be applied in reverse order. MIDDLEWARE = (", "Please consult the settings documentation for a full list #", "# \"somelib.fields.ImageField\", # # Positional args for field class. #", "EXTRA_MODEL_FIELDS = ( # ( # # Dotted path to", "since # at the moment we are using custom forks", "is omitted. # (_(\"Another name\"),), # {\"blank\": True, \"default\": 1},", "tag. Each # item in the sequence is a three", "{ \"context_processors\": [ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\",", "checkout step. This is where # integration with a payment", "############# # DATABASES # ############# DATABASES = { 'default': {", "in # production. 
Best set to ``True`` in local_settings.py DEBUG", "import absolute_import, unicode_literals import os from django import VERSION as", "{\"blank\": True, \"default\": 1}, # ), # ) # Setting", "the same as your # system time zone. TIME_ZONE =", "for shop categories. Defaults to False. # SHOP_CATEGORY_USE_FEATURED_IMAGE = True", "= ( \"debug_toolbar\", \"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## #", "session cookie expires when the Web browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE", ") # Setting to turn on featured images for blog", "SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1 # If you set", "the billing/shipping checkout step. This # is where shipping calculation", "library's) models. Each item in the sequence is a four", "for settings to be # defined per machine. # Instead", "\"%s.local_settings\" % PROJECT_APP module = imp.new_module(module_name) module.__file__ = f sys.modules[module_name]", "# ignored in your version control system allowing for settings", "(2, \"Colour\"), # ) # Sequence of indexes from the", "for the template, and the template path. # These templates", "# http://cartridge.jupo.org/configuration.html#default-settings # Sequence of available credit card types for", "Each # item in the sequence is a three item", "a unique ID # for the template, a label for", "operating system. # If running in a Windows environment this", "DJANGO_VERSION from django.utils.translation import ugettext_lazy as _ SECRET_KEY = <KEY>'", "= [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [ os.path.join(PROJECT_ROOT, \"templates\") ],", "exec(open(f, \"rb\").read()) #################### # DYNAMIC SETTINGS # #################### # set_dynamic_settings()", "True, Django will use timezone-aware datetimes. USE_TZ = True #", "# List of middleware classes to use. 
Order is important;", "Library\"), \"media-library\"),)), # (_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\", # \"shop.Sale\", \"shop.Order\")),", "control how the options should be ordered in the admin,", "project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL prefix for static", "class, the path # ``django.models.db.`` can be omitted for regular", "the sequence is a three item sequence, containing a unique", "to the model and its field # name to be", "the python standard library. If an empty string is #", "# URL prefix for static files. # Example: \"http://media.lawrence.com/static/\" STATIC_URL", "languages LANGUAGES = ( ('en', _('English')), ) # A boolean", "what has been # defined so far, in order to", "# require database migrations! # SHOP_OPTION_TYPE_CHOICES = ( # (1,", "full list # of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings #", "third and fourth items are a sequence of positional #", "path is omitted. # (_(\"Another name\"),), # {\"blank\": True, \"default\":", "Mezzanine's (or any # library's) models. Each item in the", "forks of them. PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\" #########################", "then \"Size\" given the above: # SHOP_OPTION_ADMIN_ORDER = (2, 1)", "fabfile, where setting the dynamic settings below isn't strictly #", "overriding. Please consult the settings documentation for a full list", "in. Note that if a menu template is used #", "field class to use for # the field. The third", "MIDDLEWARE # Store these package names here as they may", "'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.dev', } } ######### # PATHS #", "we use exec so that # local_settings has full access", "= PROJECT_APP # URL prefix for static files. # Example:", "grouping of the admin menu. 
# # ADMIN_MENU_ORDER = (", "has been # defined so far, in order to provide", "number of these will # require database migrations! # SHOP_OPTION_TYPE_CHOICES", "True SITE_ID = 1 # If you set this to", "# Uncomment if using internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware',", "this site; required if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts", "Make sure to use a # trailing slash. # Examples:", "try: from mezzanine.utils.conf import set_dynamic_settings except ImportError: pass else: set_dynamic_settings(globals())", "\"pages/menus/dropdown.html\"), # (2, _(\"Left-hand tree\"), \"pages/menus/tree.html\"), # (3, _(\"Footer\"), \"pages/menus/footer.html\"),", "zone for this installation. Choices can be found here: #", "# overriding. Please consult the settings documentation for a full", "######################### # OPTIONAL APPLICATIONS # ######################### # These will be", "is successful and all of the order # object's data", "media served from MEDIA_ROOT. Make sure to use a #", "will rewrite globals based on what has been # defined", "available for selection when editing which # menus a page", "enough to be put here, commented # out, for conveniently", "static files should be collected to. # Don't put anything", "'rest_framework', 'rest_framework_api_key', 'drf_yasg', # 'oauth2_provider', # 'rest_framework.authtoken', 'mezzanine_cartridge_api', ) #", "# ) # A three item sequence, each containing a", "value of None will cause Django to use the same", "the order # object's data has been created. This is", "# defined per machine. # Instead of doing \"from .local_settings", "the Web browser is closed. 
SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID =", "\"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ],", "case may be when using the # fabfile, where setting", "# these middleware classes will be applied in the order", "the order given, and in the # response phase the", "# # Keyword args for field class. # {\"blank\": True,", "Colour. NOTE: Increasing the number of these will # require", "for field class. # (_(\"Image\"),), # # Keyword args for", "implemented. # SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\" # Dotted package path and", "args, to use when creating the # field instance. When", "( # (1, \"Unprocessed\"), # (2, \"Processed\"), # ) #", "be # a mode you'd pass directly to os.chmod. FILE_UPLOAD_PERMISSIONS", "in your version control system allowing for settings to be", "\"IntegerField\", # 'django.db.models.' is implied if path is omitted. #", "to turn on featured images for shop categories. Defaults to", "= True # If True, the checkout process is split", "the # function ``cartridge.shop.utils.set_shipping``. 
# SHOP_HANDLER_BILLING_SHIPPING = \\ # \"cartridge.shop.checkout.default_billship_handler\"", "\"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\",", "\"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ], \"builtins\":", "= True SITE_ID = 1 # If you set this", "performed and set using the # function ``cartridge.shop.utils.set_shipping``. # SHOP_HANDLER_BILLING_SHIPPING", "set this to True, Django will use timezone-aware datetimes. USE_TZ", "your version control system allowing for settings to be #", "sys.modules so it's visible to Django's autoreload. f = os.path.join(PROJECT_APP_PATH,", "( # (1, \"Size\"), # (2, \"Colour\"), # ) #", "PAGE_MENU_TEMPLATES = ( # (1, _(\"Top navigation bar\"), \"pages/menus/dropdown.html\"), #", "turn on featured images for shop categories. Defaults to False.", "Should always be set to ``False`` in # production. Best", "# (\"Site\", (\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")), # (\"Users\", (\"auth.User\", \"auth.Group\",)), #", "access to everything defined in this module. # Also force", "SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\" # Sequence of value/name pairs for order", "be injected into Mezzanine's (or any # library's) models. Each", "on submit of the billing/shipping checkout step. This # is", "valid for this site; required if DEBUG is False #", "in STATICFILES_DIRS. 
# Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) #", "this directory yourself; store your static files # in apps'", "doing \"from .local_settings import *\", we use exec so that", "be implemented. # SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\" # Sequence of value/name", "added, and the dotted path to the field class to", "that turns on/off debug mode. When set to ``True``, stack", "with a payment gateway should be implemented. # SHOP_HANDLER_PAYMENT =", "label for the template, and the template path. # These", "# APPLICATIONS # ################ INSTALLED_APPS = ( \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\",", "Full filesystem path to the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP", "the checkout process has a final confirmation step before #", "PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL SETTINGS # ################## # Allow", "from django import VERSION as DJANGO_VERSION from django.utils.translation import ugettext_lazy", "so that # local_settings has full access to everything defined", "# LOCAL SETTINGS # ################## # Allow any settings to", "dynamic settings below isn't strictly # required. try: from mezzanine.utils.conf", "set newly-uploaded files to. The value should be # a", "urlpatterns from for the project. ROOT_URLCONF = \"%s.urls\" % PROJECT_APP", "SETTINGS # ######################## # Hosts/domain names that are valid for", "cartridge.shop.defaults # with default values, but are common enough to", "on featured images for blog posts. Defaults to False. #", "# 'django.db.models.' is implied if path is omitted. 
# (_(\"Another", "\"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\",", "os.path.join(PROJECT_ROOT, \"templates\") ], \"OPTIONS\": { \"context_processors\": [ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\",", "A boolean that turns on/off debug mode. When set to", "is important; in the request phase, # these middleware classes", "with default values, but are common enough to be put", "specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL prefix for static files.", "\"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = STATIC_URL + \"media/\" # Absolute filesystem", "directory static files should be collected to. # Don't put", "'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\",", "can be performed and set using the # function ``cartridge.shop.utils.set_shipping``.", "should appear in. Note that if a menu template is", "this to True, Django will use timezone-aware datetimes. USE_TZ =", "PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every cache key will", "pages. 
Should always be set to ``False`` in # production.", "# # BLOG_USE_FEATURED_IMAGE = True # If True, the django-modeltranslation", "# that doesn't appear in this setting, all pages will", "Dotted path to field class. # \"somelib.fields.ImageField\", # # Positional", "``django.models.db.`` can be omitted for regular Django model fields. #", "browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1 #", "images for blog posts. Defaults to False. # # BLOG_USE_FEATURED_IMAGE", "\"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment if using internationalisation or localisation #", "False, Django will make some optimizations so as not #", "STATICFILES_DIRS. # Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) # URL", "templates are then available for selection when editing which #", "(1, _(\"Top navigation bar\"), \"pages/menus/dropdown.html\"), # (2, _(\"Left-hand tree\"), \"pages/menus/tree.html\"),", "\"mezzanine.template.loader_tags\", ], \"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ], }, },", "###################### # The following settings are already defined with default", "the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT =", "ALLOWED_HOSTS = ['*'] # Local time zone for this installation.", "system time zone. TIME_ZONE = 'UTC' # If you set", "\"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ], }, }, ] if DJANGO_VERSION < (1,", "stack traces # are displayed for error pages. Should always", "set using the # function ``cartridge.shop.utils.set_shipping``. 
# SHOP_HANDLER_BILLING_SHIPPING = \\", "Instead of doing \"from .local_settings import *\", we use exec", "values accord to the locale # module in the python", "string is # used, will fall back to the system's", "the number of these will # require database migrations! #", "added to ``INSTALLED_APPS``, only if available. OPTIONAL_APPS = ( \"debug_toolbar\",", "traces # are displayed for error pages. Should always be", "files should be collected to. # Don't put anything in", "regular Django model fields. # # EXTRA_MODEL_FIELDS = ( #", "import VERSION as DJANGO_VERSION from django.utils.translation import ugettext_lazy as _", "to the directory static files should be collected to. #", "Dotted package path and name of the function that #", "positional # args and a dictionary of keyword args, to", "import ugettext_lazy as _ SECRET_KEY = <KEY>' ###################### # CARTRIDGE", "will get prefixed with this value - here we set", "order. MIDDLEWARE = ( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment if using", "# If you set this to True, Django will use", "# SHOP_OPTION_TYPE_CHOICES = ( # (1, \"Size\"), # (2, \"Colour\"),", "applied in reverse order. MIDDLEWARE = ( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', #", "True # Whether a user's session cookie expires when the", "set it to # the name of the directory the", "'NAME': 'db.dev', } } ######### # PATHS # ######### #", "pairs for order statuses. # SHOP_ORDER_STATUS_CHOICES = ( # (1,", "this installation. 
All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html", "version control system allowing for settings to be # defined", "(1, 10): MIDDLEWARE_CLASSES = MIDDLEWARE del MIDDLEWARE # Store these", "\"conf.Setting\")), # (\"Users\", (\"auth.User\", \"auth.Group\",)), # ) # A three", "\"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', ) if DJANGO_VERSION < (1,", "to the # INSTALLED_APPS setting. USE_MODELTRANSLATION = False ######################## #", "The value should be # a mode you'd pass directly", "get prefixed with this value - here we set it", "The first two items are the dotted path to the", "'mezzanine_cartridge_api', ) # List of middleware classes to use. Order", "are valid for this site; required if DEBUG is False", "timezone-aware datetimes. USE_TZ = True # Language code for this", "= ( # (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",), #", "'drf_yasg', # 'oauth2_provider', # 'rest_framework.authtoken', 'mezzanine_cartridge_api', ) # List of", "= ( # (1, \"Size\"), # (2, \"Colour\"), # )", "True, the django-modeltranslation will be added to the # INSTALLED_APPS", "pages will appear in it. # PAGE_MENU_TEMPLATES = ( #", "are a sequence of positional # args and a dictionary", "to *all* of Mezzanine's content types: # ( # \"mezzanine.pages.models.Page.another_field\",", "The third and fourth items are a sequence of positional", "classes will be applied in the order given, and in", "be added to the # INSTALLED_APPS setting. USE_MODELTRANSLATION = False", "= f sys.modules[module_name] = module exec(open(f, \"rb\").read()) #################### # DYNAMIC", "will fall back to the system's locale. 
SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\"", "environment this must be set to the same as your", "order # object's data has been created. This is where", "is called once an order is successful and all of", "turn on featured images for blog posts. Defaults to False.", "name of the function that # is called on submit", "# If running in a Windows environment this must be", "may be when using the # fabfile, where setting the", "any custom order # processing should be implemented. # SHOP_HANDLER_ORDER", "SHOP_OPTION_ADMIN_ORDER = (2, 1) ###################### # MEZZANINE SETTINGS # ######################", "available credit card types for payment. # SHOP_CARD_TYPES = (\"Mastercard\",", "of keyword args, to use when creating the # field", "\"mezzanine.pages.models.Page.another_field\", # \"IntegerField\", # 'django.db.models.' is implied if path is", "# Also force into sys.modules so it's visible to Django's", "each containing a sequence of template tags # used to", "tags # used to render the admin dashboard. # #", "SHOP_OPTION_TYPE_CHOICES = ( # (1, \"Size\"), # (2, \"Colour\"), #", "so it's visible to Django's autoreload. f = os.path.join(PROJECT_APP_PATH, \"local_settings.py\")", "module = imp.new_module(module_name) module.__file__ = f sys.modules[module_name] = module exec(open(f,", "Mezzanine's content types: # ( # \"mezzanine.pages.models.Page.another_field\", # \"IntegerField\", #", "on/off debug mode. When set to ``True``, stack traces #", "ID # for the template, a label for the template,", "ordering and grouping of the admin menu. # # ADMIN_MENU_ORDER", "site; required if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS", "implements: # http://cartridge.jupo.org/configuration.html#default-settings # Sequence of available credit card types", "# # Dotted path to field. 
# \"mezzanine.blog.models.BlogPost.image\", # #", "os from django import VERSION as DJANGO_VERSION from django.utils.translation import", "dictionary of keyword args, to use when creating the #", "of value/name pairs for order statuses. # SHOP_ORDER_STATUS_CHOICES = (", "module_name = \"%s.local_settings\" % PROJECT_APP module = imp.new_module(module_name) module.__file__ =", "be applied in the order given, and in the #", "if path is omitted. # (_(\"Another name\"),), # {\"blank\": True,", "separate # billing/shipping and payment steps. # SHOP_CHECKOUT_STEPS_SPLIT = True", "# \"mezzanine.accounts\", 'corsheaders', 'rest_framework', 'rest_framework_api_key', 'drf_yasg', # 'oauth2_provider', # 'rest_framework.authtoken',", "\"Processed\"), # ) # Sequence of value/name pairs for types", "Store these package names here as they may change in", "# defined so far, in order to provide some better", "Allow any settings to be defined in local_settings.py which should", "DYNAMIC SETTINGS # #################### # set_dynamic_settings() will rewrite globals based", "templates used by the ``page_menu`` template tag. Each # item", "\"/home/media/media.lawrence.com/media/\" MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module name to import", "django.utils.translation import ugettext_lazy as _ SECRET_KEY = <KEY>' ###################### #", "are already defined in cartridge.shop.defaults # with default values, but", "try and use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP", "# Absolute filesystem path to the directory that will hold", "and grouping of the admin menu. # # ADMIN_MENU_ORDER =", "( # \"mezzanine.pages.models.Page.another_field\", # \"IntegerField\", # 'django.db.models.' 
is implied if", "= os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH)", "= STATIC_URL + \"media/\" # Absolute filesystem path to the", "implemented. # SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\" # Sequence of value/name pairs", "[ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\",", "# ############# DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME':", "change in the future since # at the moment we", "which should be # ignored in your version control system", "added to the # INSTALLED_APPS setting. USE_MODELTRANSLATION = False ########################", "# completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION = True # Controls the formatting", "is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1 # If", "to use a # trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\"", "\"from .local_settings import *\", we use exec so that #", "# Package/module name to import the root urlpatterns from for", "from MEDIA_ROOT. Make sure to use a # trailing slash.", "menus a page should appear in. Note that if a", "(_(\"Image\"),), # # Keyword args for field class. # {\"blank\":", "Sequence of value/name pairs for order statuses. # SHOP_ORDER_STATUS_CHOICES =", "to ``True``, stack traces # are displayed for error pages.", "\"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",), # ) # A sequence", "checkout step. 
This # is where shipping calculation can be", "the system's locale. SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\" # Dotted package path", "the above: # SHOP_OPTION_ADMIN_ORDER = (2, 1) ###################### # MEZZANINE", "\"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\", # \"mezzanine.accounts\", 'corsheaders',", "put here, commented # out, for conveniently overriding. Please consult", "= ( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment if using internationalisation or", "this must be set to the same as your #", "to use for # the field. The third and fourth", "hold user-uploaded files. # Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\"))", "based on what has been # defined so far, in", "(\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\", # \"shop.Sale\", \"shop.Order\")), # (\"Site\", (\"sites.Site\", \"redirects.Redirect\",", "accord to the locale # module in the python standard", "of templates used by the ``page_menu`` template tag. Each #", "monetary values accord to the locale # module in the", "\"local_settings.py\") if os.path.exists(f): import sys import imp module_name = \"%s.local_settings\"", "# integration with a payment gateway should be implemented. #", "################ # APPLICATIONS # ################ INSTALLED_APPS = ( \"django.contrib.admin\", \"django.contrib.auth\",", "# Sequence of value/name pairs for types of product options,", "commented # out, for conveniently overriding. Please consult the settings", "shipping calculation can be performed and set using the #", "first two items are the dotted path to the model", "files. # Example: \"http://media.lawrence.com/static/\" STATIC_URL = \"/static/\" # Absolute path", "URL that handles the media served from MEDIA_ROOT. 
Make sure", "system's locale. SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\" # Dotted package path and", "numeric mode to set newly-uploaded files to. The value should", "given, and in the # response phase the middleware will", "two items are the dotted path to the model and", "although not all choices may be available on all operating", "# SHOP_CARD_TYPES = (\"Mastercard\", \"Visa\", \"Diners\", \"Amex\") # Setting to", "in local_settings.py which should be # ignored in your version", "# ) # Setting to turn on featured images for", "and the dotted path to the field class to use", "(2, _(\"Left-hand tree\"), \"pages/menus/tree.html\"), # (3, _(\"Footer\"), \"pages/menus/footer.html\"), # )", "in the order given, and in the # response phase", "navigation bar\"), \"pages/menus/dropdown.html\"), # (2, _(\"Left-hand tree\"), \"pages/menus/tree.html\"), # (3,", "(_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\", # \"shop.Sale\", \"shop.Order\")), # (\"Site\", (\"sites.Site\",", "template, and the template path. # These templates are then", "into sys.modules so it's visible to Django's autoreload. f =", "\"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\",", "# (\"mezzanine_tags.recent_actions\",), # ) # A sequence of templates used", "of settings Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings # Sequence of available", "will # require database migrations! # SHOP_OPTION_TYPE_CHOICES = ( #", "conveniently # overriding. Please consult the settings documentation for a", "# INSTALLED_APPS setting. USE_MODELTRANSLATION = False ######################## # MAIN DJANGO", "in this setting, all pages will appear in it. 
#", "}, ] if DJANGO_VERSION < (1, 9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################", "and use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP #", "on submit of the payment checkout step. This is where", "pass directly to os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 ############# # DATABASES", "to render the admin dashboard. # # DASHBOARD_TAGS = (", "All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE =", "( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment if using internationalisation or localisation", "be ordered in the admin, # eg for \"Colour\" then", "\"/home/media/media.lawrence.com/static/\" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) # URL that handles the", "where # integration with a payment gateway should be implemented.", "you set this to True, Django will use timezone-aware datetimes.", "field to *all* of Mezzanine's content types: # ( #", "INSTALLED_APPS setting. USE_MODELTRANSLATION = False ######################## # MAIN DJANGO SETTINGS", "above: # SHOP_OPTION_ADMIN_ORDER = (2, 1) ###################### # MEZZANINE SETTINGS", "% PROJECT_APP TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [", "import the root urlpatterns from for the project. ROOT_URLCONF =", "cache key will get prefixed with this value - here", "order to provide some better defaults where # applicable. We", "value should be # a mode you'd pass directly to", "USE_I18N = False AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",) # The numeric mode", "required. try: from mezzanine.utils.conf import set_dynamic_settings except ImportError: pass else:", "submit of the billing/shipping checkout step. This # is where", "as the operating system. 
# If running in a Windows", "the template, a label for the template, and the template", "use when creating the # field instance. When specifying the", "with this value - here we set it to #", "types for payment. # SHOP_CARD_TYPES = (\"Mastercard\", \"Visa\", \"Diners\", \"Amex\")", "when the Web browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID", "import *\", we use exec so that # local_settings has", "the function that # is called once an order is", "\"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ], \"builtins\": [ \"mezzanine.template.loader_tags\", ], \"loaders\":", "# SHOP_CHECKOUT_STEPS_SPLIT = True # If True, the checkout process", "step before # completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION = True # Controls", "When specifying the field class, the path # ``django.models.db.`` can", "is implied if path is omitted. # (_(\"Another name\"),), #", "template tags # used to render the admin dashboard. #", "} } ######### # PATHS # ######### # Full filesystem", "# (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",), # ) #", "A three item sequence, each containing a sequence of template", "(or any # library's) models. Each item in the sequence", "or localisation # 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware',", "in order to provide some better defaults where # applicable.", "is called on submit of the payment checkout step. 
This", "# PATHS # ######### # Full filesystem path to the", "# Absolute path to the directory static files should be", "ugettext_lazy as _ SECRET_KEY = <KEY>' ###################### # CARTRIDGE SETTINGS", "of Mezzanine's content types: # ( # \"mezzanine.pages.models.Page.another_field\", # \"IntegerField\",", "appear in this setting, all pages will appear in it.", "a full list of settings Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings #", "( # (1, _(\"Top navigation bar\"), \"pages/menus/dropdown.html\"), # (2, _(\"Left-hand", "them. PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\" ######################### # OPTIONAL", "\\ # \"cartridge.shop.checkout.default_billship_handler\" # Dotted package path and name of", "# \"mezzanine.pages.models.Page.another_field\", # \"IntegerField\", # 'django.db.models.' is implied if path", "card types for payment. # SHOP_CARD_TYPES = (\"Mastercard\", \"Visa\", \"Diners\",", "'corsheaders.middleware.CorsMiddleware', ) if DJANGO_VERSION < (1, 10): MIDDLEWARE_CLASSES = MIDDLEWARE", "default values, but are common enough to be put here,", "eg for \"Colour\" then \"Size\" given the above: # SHOP_OPTION_ADMIN_ORDER", "_(\"Left-hand tree\"), \"pages/menus/tree.html\"), # (3, _(\"Footer\"), \"pages/menus/footer.html\"), # ) #", "# with default values, but are common enough to be", "DEBUG = True # Whether a user's session cookie expires", "(2, 1) ###################### # MEZZANINE SETTINGS # ###################### # The", "DJANGO_VERSION < (1, 9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ # APPLICATIONS #", "the locale # module in the python standard library. If", "Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and grouping", "to use when creating the # field instance. When specifying", "if available. 
OPTIONAL_APPS = ( \"debug_toolbar\", \"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI,", "user-uploaded files. # Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) #", "settings Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings # Sequence of available credit", "\"cartridge.shop.checkout.default_payment_handler\" # Sequence of value/name pairs for order statuses. #", "should be ordered in the admin, # eg for \"Colour\"", "be put here, commented out, for conveniently # overriding. Please", "for # the field. The third and fourth items are", "ROOT_URLCONF = \"%s.urls\" % PROJECT_APP TEMPLATES = [ { \"BACKEND\":", "'corsheaders', 'rest_framework', 'rest_framework_api_key', 'drf_yasg', # 'oauth2_provider', # 'rest_framework.authtoken', 'mezzanine_cartridge_api', )", "settings are already defined with default values in # the", "list of settings Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings # Sequence of", "} ######### # PATHS # ######### # Full filesystem path", "= ( ('en', _('English')), ) # A boolean that turns", "use a # trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL", "directory that will hold user-uploaded files. # Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT", "for the project. ROOT_URLCONF = \"%s.urls\" % PROJECT_APP TEMPLATES =", "imp module_name = \"%s.local_settings\" % PROJECT_APP module = imp.new_module(module_name) module.__file__", "item sequence. # The first two items are the dotted", "field class. # \"somelib.fields.ImageField\", # # Positional args for field", "the moment we are using custom forks of them. PACKAGE_NAME_FILEBROWSER", "= \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\" ######################### # OPTIONAL APPLICATIONS #", "out, for conveniently # overriding. 
Please consult the settings documentation", "using internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',", "PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\" ######################### # OPTIONAL APPLICATIONS", "] if DJANGO_VERSION < (1, 9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ #", "within each of Mezzanine's apps, but are # common enough", "= \"%s.local_settings\" % PROJECT_APP module = imp.new_module(module_name) module.__file__ = f", "# If True, the checkout process is split into separate", "\"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', ) if", "to ``INSTALLED_APPS``, only if available. OPTIONAL_APPS = ( \"debug_toolbar\", \"django_extensions\",", "# ######################## # Hosts/domain names that are valid for this", "can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all", "instance. When specifying the field class, the path # ``django.models.db.``", "False. # # BLOG_USE_FEATURED_IMAGE = True # If True, the", "final confirmation step before # completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION = True", "False AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",) # The numeric mode to set", "the media served from MEDIA_ROOT. 
Make sure to use a", "``defaults.py`` module within each of Mezzanine's apps, but are #", "TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ # APPLICATIONS # ################ INSTALLED_APPS = ( \"django.contrib.admin\",", "['*'] # Local time zone for this installation. Choices can", "step. This is where # integration with a payment gateway", "mode. When set to ``True``, stack traces # are displayed", "for a full list of settings Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings", "SHOP_CATEGORY_USE_FEATURED_IMAGE = True # If True, the checkout process is", "before # completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION = True # Controls the", "payment steps. # SHOP_CHECKOUT_STEPS_SPLIT = True # If True, the", "Also force into sys.modules so it's visible to Django's autoreload.", "we are using custom forks of them. PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\"", "files. # Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module", "Note that if a menu template is used # that", "as they may change in the future since # at", "cause Django to use the same # timezone as the", "time zone for this installation. Choices can be found here:", "a menu template is used # that doesn't appear in", "(1, 9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ # APPLICATIONS # ################ INSTALLED_APPS", "< (1, 10): MIDDLEWARE_CLASSES = MIDDLEWARE del MIDDLEWARE # Store", "# Full filesystem path to the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__))", "choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = \"en\"", "ignored in your version control system allowing for settings to", "then available for selection when editing which # menus a", "in it. 
# PAGE_MENU_TEMPLATES = ( # (1, _(\"Top navigation", "###################### # The following settings are already defined in cartridge.shop.defaults", "with default values in # the ``defaults.py`` module within each", "# MEZZANINE SETTINGS # ###################### # The following settings are", "STATIC_URL = \"/static/\" # Absolute path to the directory static", "# DYNAMIC SETTINGS # #################### # set_dynamic_settings() will rewrite globals", "SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\" # Dotted package path and name of", "been created. This is where any custom order # processing", "static files # in apps' \"static/\" subdirectories and in STATICFILES_DIRS.", "a four item sequence. # The first two items are", "these middleware classes will be applied in the order given,", "############# DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.dev',", "by the ``page_menu`` template tag. Each # item in the", "strictly # required. try: from mezzanine.utils.conf import set_dynamic_settings except ImportError:", "# ADMIN_MENU_ORDER = ( # (\"Content\", (\"pages.Page\", \"blog.BlogPost\", # \"generic.ThreadedComment\",", "# SHOP_CATEGORY_USE_FEATURED_IMAGE = True # If True, the checkout process", "defined with default values in # the ``defaults.py`` module within", "the path # ``django.models.db.`` can be omitted for regular Django", "systems. # On Unix systems, a value of None will", "project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR", "# Local time zone for this installation. 
Choices can be", "os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every cache key", "# Dotted package path and name of the function that", "= \"cartridge.shop.checkout.default_order_handler\" # Dotted package path and name of the", "SHOP_ORDER_STATUS_CHOICES = ( # (1, \"Unprocessed\"), # (2, \"Processed\"), #", "If you set this to False, Django will make some", "of middleware classes to use. Order is important; in the", "[ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [ os.path.join(PROJECT_ROOT, \"templates\") ], \"OPTIONS\":", "Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES setting that # control", "the internationalization machinery. USE_I18N = False AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",) #", "Size, Colour. NOTE: Increasing the number of these will #", "Sequence of available credit card types for payment. # SHOP_CARD_TYPES", "{ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.dev', } } #########", "the function that # is called on submit of the", "be set to the same as your # system time", "# SHOP_ORDER_STATUS_CHOICES = ( # (1, \"Unprocessed\"), # (2, \"Processed\"),", "something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL prefix", "setting, all pages will appear in it. # PAGE_MENU_TEMPLATES =", "URL prefix for static files. # Example: \"http://media.lawrence.com/static/\" STATIC_URL =", "be # ignored in your version control system allowing for", "everything defined in this module. # Also force into sys.modules", "all pages will appear in it. # PAGE_MENU_TEMPLATES = (", "conveniently overriding. Please consult the settings # documentation for a", "allow this settings module to be imported # without Mezzanine", "so as not # to load the internationalization machinery. USE_I18N", "a mode you'd pass directly to os.chmod. 
FILE_UPLOAD_PERMISSIONS = 0o644", "from the SHOP_OPTION_TYPE_CHOICES setting that # control how the options", "# ), # # Example of adding a field to", "SETTINGS # ###################### # The following settings are already defined", "A sequence of fields that will be injected into Mezzanine's", "that if a menu template is used # that doesn't", "available on all operating systems. # On Unix systems, a", "= (2, 1) ###################### # MEZZANINE SETTINGS # ###################### #", "'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\",", "Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not", "], \"OPTIONS\": { \"context_processors\": [ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\",", "# Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = STATIC_URL + \"media/\" #", "# Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module name", "error pages. Should always be set to ``False`` in #", "is where any custom order # processing should be implemented.", "\"Amex\") # Setting to turn on featured images for shop", "admin dashboard. 
# # DASHBOARD_TAGS = ( # (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"),", "template, a label for the template, and the template path.", "newly-uploaded files to. The value should be # a mode", "}, }, ] if DJANGO_VERSION < (1, 9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"]", "for static files. # Example: \"http://media.lawrence.com/static/\" STATIC_URL = \"/static/\" #", "slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = STATIC_URL + \"media/\"", "where shipping calculation can be performed and set using the", "the template path. # These templates are then available for", "\"context_processors\": [ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\", \"django.template.context_processors.debug\", \"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\",", "# ###################### # The following settings are already defined in", "an order is successful and all of the order #", "\"default\": 1}, # ), # ) # Setting to turn", "checkout process has a final confirmation step before # completion.", "omitted for regular Django model fields. # # EXTRA_MODEL_FIELDS =", "categories. Defaults to False. # SHOP_CATEGORY_USE_FEATURED_IMAGE = True # If", "to everything defined in this module. # Also force into", "will hold user-uploaded files. # Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT = os.path.join(PROJECT_ROOT,", "items are the dotted path to the model and its", "successful and all of the order # object's data has", "\"shop.Sale\", \"shop.Order\")), # (\"Site\", (\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")), # (\"Users\", (\"auth.User\",", "it's visible to Django's autoreload. 
f = os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if", "= os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) # URL that handles the media served", "# required. try: from mezzanine.utils.conf import set_dynamic_settings except ImportError: pass", "running in a Windows environment this must be set to", "Web browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1", "= True # Controls the formatting of monetary values accord", "is used # that doesn't appear in this setting, all", "{ 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.dev', } } ######### # PATHS", ") # A sequence of fields that will be injected", "static files. # Example: \"http://media.lawrence.com/static/\" STATIC_URL = \"/static/\" # Absolute", "using the # function ``cartridge.shop.utils.set_shipping``. # SHOP_HANDLER_BILLING_SHIPPING = \\ #", "each of Mezzanine's apps, but are # common enough to", "(\"mezzanine.core.auth_backends.MezzanineBackend\",) # The numeric mode to set newly-uploaded files to.", "for this site; required if DEBUG is False # See", "some better defaults where # applicable. We also allow this", "to be # defined per machine. # Instead of doing", "= False AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",) # The numeric mode to", "# Don't put anything in this directory yourself; store your", "but are # common enough to be put here, commented", "to import the root urlpatterns from for the project. ROOT_URLCONF", "field # name to be added, and the dotted path", "to # the name of the directory the project is", "Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings # Sequence of available credit card", "[ \"mezzanine.template.loader_tags\", ], \"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ], },", "# the name of the directory the project is in", "the admin dashboard. 
# # DASHBOARD_TAGS = ( # (\"blog_tags.quick_blog\",", "Example: \"http://media.lawrence.com/static/\" STATIC_URL = \"/static/\" # Absolute path to the", "the django-modeltranslation will be added to the # INSTALLED_APPS setting.", "for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name", "are then available for selection when editing which # menus", "put anything in this directory yourself; store your static files", "names here as they may change in the future since", "has been created. This is where any custom order #", "that doesn't appear in this setting, all pages will appear", "PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every", "your static files # in apps' \"static/\" subdirectories and in", "be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = \"en\" # Supported", "of fields that will be injected into Mezzanine's (or any", "phase, # these middleware classes will be applied in the", "# ################## # Allow any settings to be defined in", "called on submit of the billing/shipping checkout step. This #", "Supported languages LANGUAGES = ( ('en', _('English')), ) # A", "are the dotted path to the model and its field", "prefix for static files. # Example: \"http://media.lawrence.com/static/\" STATIC_URL = \"/static/\"", "on all operating systems. # On Unix systems, a value", "(\"mezzanine_tags.recent_actions\",), # ) # A sequence of templates used by", "This is where # integration with a payment gateway should", "settings below isn't strictly # required. try: from mezzanine.utils.conf import", "trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = STATIC_URL +", "of the admin menu. 
# # ADMIN_MENU_ORDER = ( #", "the options should be ordered in the admin, # eg", "apps, but are # common enough to be put here,", "# Hosts/domain names that are valid for this site; required", "mode you'd pass directly to os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 #############", "directory yourself; store your static files # in apps' \"static/\"", "# set_dynamic_settings() will rewrite globals based on what has been", "\"django.template.context_processors.i18n\", \"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ], \"builtins\": [", "used to render the admin dashboard. # # DASHBOARD_TAGS =", "three item sequence, each containing a sequence of template tags", "integration with a payment gateway should be implemented. # SHOP_HANDLER_PAYMENT", "for conveniently # overriding. Please consult the settings documentation for", "sys import imp module_name = \"%s.local_settings\" % PROJECT_APP module =", "= ( # (\"Content\", (\"pages.Page\", \"blog.BlogPost\", # \"generic.ThreadedComment\", (_(\"Media Library\"),", "\"Visa\", \"Diners\", \"Amex\") # Setting to turn on featured images", "Django's autoreload. f = os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if os.path.exists(f): import sys", "project is in to try and use something # project", "= \"/static/\" # Absolute path to the directory static files", "# the field. The third and fourth items are a", "# are displayed for error pages. Should always be set", "= ( # (1, _(\"Top navigation bar\"), \"pages/menus/dropdown.html\"), # (2,", "cookie expires when the Web browser is closed. 
SESSION_EXPIRE_AT_BROWSER_CLOSE =", "# # ADMIN_MENU_ORDER = ( # (\"Content\", (\"pages.Page\", \"blog.BlogPost\", #", "the dotted path to the model and its field #", "following settings are already defined in cartridge.shop.defaults # with default", "omitted. # (_(\"Another name\"),), # {\"blank\": True, \"default\": 1}, #", "\"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ], \"builtins\": [ \"mezzanine.template.loader_tags\", ], \"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\",", "this value - here we set it to # the", "If an empty string is # used, will fall back", "MEDIA_URL = STATIC_URL + \"media/\" # Absolute filesystem path to", "you set this to False, Django will make some optimizations", "'oauth2_provider', # 'rest_framework.authtoken', 'mezzanine_cartridge_api', ) # List of middleware classes", "\"blog\"}, # ), # # Example of adding a field", "are displayed for error pages. Should always be set to", "in the sequence is a four item sequence. # The", "\"mezzanine.accounts\", 'corsheaders', 'rest_framework', 'rest_framework_api_key', 'drf_yasg', # 'oauth2_provider', # 'rest_framework.authtoken', 'mezzanine_cartridge_api',", "VERSION as DJANGO_VERSION from django.utils.translation import ugettext_lazy as _ SECRET_KEY", "Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = STATIC_URL + \"media/\" # Absolute", "if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*']", "submit of the payment checkout step. This is where #", "if using internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware',", "DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.dev', }", "Setting to turn on featured images for shop categories. 
Defaults", "######################## # Hosts/domain names that are valid for this site;", "\"media/\" # Absolute filesystem path to the directory that will", "middleware classes to use. Order is important; in the request", "billing/shipping and payment steps. # SHOP_CHECKOUT_STEPS_SPLIT = True # If", "# trailing slash. # Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL = STATIC_URL", "# The first two items are the dotted path to", "os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if os.path.exists(f): import sys import imp module_name =", "Windows environment this must be set to the same as", "settings module to be imported # without Mezzanine installed, as", "True, \"default\": 1}, # ), # ) # Setting to", "function ``cartridge.shop.utils.set_shipping``. # SHOP_HANDLER_BILLING_SHIPPING = \\ # \"cartridge.shop.checkout.default_billship_handler\" # Dotted", "# function ``cartridge.shop.utils.set_shipping``. # SHOP_HANDLER_BILLING_SHIPPING = \\ # \"cartridge.shop.checkout.default_billship_handler\" #", "the # field instance. When specifying the field class, the", "to False. # # BLOG_USE_FEATURED_IMAGE = True # If True,", "will be added to the # INSTALLED_APPS setting. USE_MODELTRANSLATION =", "del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ # APPLICATIONS # ################ INSTALLED_APPS = (", "The following settings are already defined with default values in", "of the payment checkout step. This is where # integration", "values, but are common enough to be put here, commented", "rewrite globals based on what has been # defined so", "a dictionary of keyword args, to use when creating the", "are using custom forks of them. PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI", "can be omitted for regular Django model fields. 
# #", "# Supported languages LANGUAGES = ( ('en', _('English')), ) #", "(\"auth.User\", \"auth.Group\",)), # ) # A three item sequence, each", "######################## # MAIN DJANGO SETTINGS # ######################## # Hosts/domain names", "per machine. # Instead of doing \"from .local_settings import *\",", "False ######################## # MAIN DJANGO SETTINGS # ######################## # Hosts/domain", "DJANGO SETTINGS # ######################## # Hosts/domain names that are valid", "# DASHBOARD_TAGS = ( # (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",), #", "render the admin dashboard. # # DASHBOARD_TAGS = ( #", "menu. # # ADMIN_MENU_ORDER = ( # (\"Content\", (\"pages.Page\", \"blog.BlogPost\",", "= ['*'] # Local time zone for this installation. Choices", "out, for conveniently overriding. Please consult the settings # documentation", "the same # timezone as the operating system. # If", "PROJECT_APP # URL prefix for static files. # Example: \"http://media.lawrence.com/static/\"", "os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module name to import the root urlpatterns", "% PROJECT_APP module = imp.new_module(module_name) module.__file__ = f sys.modules[module_name] =", "we set it to # the name of the directory", "to the system's locale. SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\" # Dotted package", "*MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module name to import the root urlpatterns from", "= { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.dev', } }", "path # ``django.models.db.`` can be omitted for regular Django model", ") # A boolean that turns on/off debug mode. When", "to ``True`` in local_settings.py DEBUG = True # Whether a", "as _ SECRET_KEY = <KEY>' ###################### # CARTRIDGE SETTINGS #", "only if available. 
OPTIONAL_APPS = ( \"debug_toolbar\", \"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER,", "\"grappelli_safe\" ######################### # OPTIONAL APPLICATIONS # ######################### # These will", "installed, as the case may be when using the #", "available. OPTIONAL_APPS = ( \"debug_toolbar\", \"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, )", "# menus a page should appear in. Note that if", "using the # fabfile, where setting the dynamic settings below", "= ( # ( # # Dotted path to field.", "# ``django.models.db.`` can be omitted for regular Django model fields.", "Django will use timezone-aware datetimes. USE_TZ = True # Language", "which # menus a page should appear in. Note that", "# used, will fall back to the system's locale. SHOP_CURRENCY_LOCALE", "# If True, the django-modeltranslation will be added to the", "from for the project. ROOT_URLCONF = \"%s.urls\" % PROJECT_APP TEMPLATES", "ADMIN_MENU_ORDER = ( # (\"Content\", (\"pages.Page\", \"blog.BlogPost\", # \"generic.ThreadedComment\", (_(\"Media", "in to try and use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX", "# The numeric mode to set newly-uploaded files to. The", "INSTALLED_APPS = ( \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\",", "value/name pairs for order statuses. # SHOP_ORDER_STATUS_CHOICES = ( #", "fields. # # EXTRA_MODEL_FIELDS = ( # ( # #", "in the request phase, # these middleware classes will be", "as not # to load the internationalization machinery. USE_I18N =", "and name of the function that # is called once", "always be set to ``False`` in # production. Best set", "in apps' \"static/\" subdirectories and in STATICFILES_DIRS. 
# Example: \"/home/media/media.lawrence.com/static/\"", "function that # is called once an order is successful", "a sequence of template tags # used to render the", "this to False, Django will make some optimizations so as", "\"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [ os.path.join(PROJECT_ROOT, \"templates\") ], \"OPTIONS\": { \"context_processors\":", "# control how the options should be ordered in the", "BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every cache key will get prefixed", "should be collected to. # Don't put anything in this", "for a full list # of settings Mezzanine implements: #", "# processing should be implemented. # SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\" #", "processing should be implemented. # SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\" # Dotted", "( \"debug_toolbar\", \"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL", "subdirectories and in STATICFILES_DIRS. # Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT = os.path.join(PROJECT_ROOT,", "they may change in the future since # at the", "datetimes. USE_TZ = True # Language code for this installation.", "use exec so that # local_settings has full access to", "= \"cartridge.shop.checkout.default_payment_handler\" # Sequence of value/name pairs for order statuses.", "and the template path. # These templates are then available", "= True # Language code for this installation. All choices", "collected to. 
# Don't put anything in this directory yourself;", "9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ # APPLICATIONS # ################ INSTALLED_APPS =", "checkout process is split into separate # billing/shipping and payment", "\"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ], }, }, ] if", "calculation can be performed and set using the # function", "not # to load the internationalization machinery. USE_I18N = False", "gateway should be implemented. # SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\" # Sequence", "Mezzanine installed, as the case may be when using the", "True, \"upload_to\": \"blog\"}, # ), # # Example of adding", "# On Unix systems, a value of None will cause", "are common enough to be put here, commented # out,", "\"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL SETTINGS # ##################", "in the python standard library. If an empty string is", "the directory that will hold user-uploaded files. # Example: \"/home/media/media.lawrence.com/media/\"", "http://cartridge.jupo.org/configuration.html#default-settings # Sequence of available credit card types for payment.", "Controls the ordering and grouping of the admin menu. #", "at the moment we are using custom forks of them.", "sequence of templates used by the ``page_menu`` template tag. Each", "are already defined with default values in # the ``defaults.py``", "make some optimizations so as not # to load the", "\"Diners\", \"Amex\") # Setting to turn on featured images for", "will be applied in the order given, and in the", "to be defined in local_settings.py which should be # ignored", "to the directory that will hold user-uploaded files. 
# Example:", "that # is called on submit of the billing/shipping checkout", "in the sequence is a three item sequence, containing a", "0o644 ############# # DATABASES # ############# DATABASES = { 'default':", "\"en_GB.UTF-8\" # Dotted package path and name of the function", "Setting to turn on featured images for blog posts. Defaults", "the ``defaults.py`` module within each of Mezzanine's apps, but are", "put here, commented out, for conveniently # overriding. Please consult", "``cartridge.shop.utils.set_shipping``. # SHOP_HANDLER_BILLING_SHIPPING = \\ # \"cartridge.shop.checkout.default_billship_handler\" # Dotted package", "appear in it. # PAGE_MENU_TEMPLATES = ( # (1, _(\"Top", "path and name of the function that # is called", "defined so far, in order to provide some better defaults", "given the above: # SHOP_OPTION_ADMIN_ORDER = (2, 1) ###################### #", "# \"cartridge.shop.checkout.default_billship_handler\" # Dotted package path and name of the", "# http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = \"en\" # Supported languages LANGUAGES =", "should be # ignored in your version control system allowing", "# item in the sequence is a three item sequence,", "the name of the directory the project is in to", "\"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\",", "used # that doesn't appear in this setting, all pages", "content types: # ( # \"mezzanine.pages.models.Page.another_field\", # \"IntegerField\", # 'django.db.models.'", "\"shop.ProductOption\", \"shop.DiscountCode\", # \"shop.Sale\", \"shop.Order\")), # (\"Site\", (\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")),", "True, the checkout process is split into separate # billing/shipping", "require database migrations! # SHOP_OPTION_TYPE_CHOICES = ( # (1, \"Size\"),", "creating the # field instance. 
When specifying the field class,", "path to the field class to use for # the", "on what has been # defined so far, in order", "# (\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",), # ) # A sequence of", "is # used, will fall back to the system's locale.", "( # # Dotted path to field. # \"mezzanine.blog.models.BlogPost.image\", #", "set this to False, Django will make some optimizations so", "``page_menu`` template tag. Each # item in the sequence is", "# Example: \"http://media.lawrence.com/static/\" STATIC_URL = \"/static/\" # Absolute path to", ") # Sequence of value/name pairs for types of product", "(3, _(\"Footer\"), \"pages/menus/footer.html\"), # ) # A sequence of fields", "the formatting of monetary values accord to the locale #", "installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE", "set to ``False`` in # production. Best set to ``True``", "autoreload. f = os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if os.path.exists(f): import sys import", "# Controls the ordering and grouping of the admin menu.", "the future since # at the moment we are using", "filesystem path to the directory that will hold user-uploaded files.", "# {\"blank\": True, \"default\": 1}, # ), # ) #", ".local_settings import *\", we use exec so that # local_settings", "when using the # fabfile, where setting the dynamic settings", "\"redirects.Redirect\", \"conf.Setting\")), # (\"Users\", (\"auth.User\", \"auth.Group\",)), # ) # A", "sure to use a # trailing slash. # Examples: \"http://media.lawrence.com/media/\",", "\"upload_to\": \"blog\"}, # ), # # Example of adding a", "{\"blank\": True, \"upload_to\": \"blog\"}, # ), # # Example of", "operating systems. # On Unix systems, a value of None", "in the admin, # eg for \"Colour\" then \"Size\" given", "payment. 
# SHOP_CARD_TYPES = (\"Mastercard\", \"Visa\", \"Diners\", \"Amex\") # Setting", "STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) # URL that handles the media", "import imp module_name = \"%s.local_settings\" % PROJECT_APP module = imp.new_module(module_name)", "# A boolean that turns on/off debug mode. When set", "force into sys.modules so it's visible to Django's autoreload. f", "editing which # menus a page should appear in. Note", "to ``False`` in # production. Best set to ``True`` in", "containing a sequence of template tags # used to render", "Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) # URL that handles", "SITE_ID = 1 # If you set this to False,", "# The following settings are already defined in cartridge.shop.defaults #", "*all* of Mezzanine's content types: # ( # \"mezzanine.pages.models.Page.another_field\", #", "http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = \"en\" # Supported languages LANGUAGES = (", "= \\ # \"cartridge.shop.checkout.default_billship_handler\" # Dotted package path and name", "OPTIONAL_APPS = ( \"debug_toolbar\", \"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ##################", "payment checkout step. This is where # integration with a", "field instance. When specifying the field class, the path #", "# (2, _(\"Left-hand tree\"), \"pages/menus/tree.html\"), # (3, _(\"Footer\"), \"pages/menus/footer.html\"), #", "be performed and set using the # function ``cartridge.shop.utils.set_shipping``. #", "'rest_framework.authtoken', 'mezzanine_cartridge_api', ) # List of middleware classes to use.", "as DJANGO_VERSION from django.utils.translation import ugettext_lazy as _ SECRET_KEY =", "must be set to the same as your # system", "a # trailing slash. 
# Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\" MEDIA_URL =", "# fabfile, where setting the dynamic settings below isn't strictly", "*\", we use exec so that # local_settings has full", "SETTINGS # #################### # set_dynamic_settings() will rewrite globals based on", "where any custom order # processing should be implemented. #", "of positional # args and a dictionary of keyword args,", "# Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) # URL that", "Django to use the same # timezone as the operating", "far, in order to provide some better defaults where #", "class. # {\"blank\": True, \"upload_to\": \"blog\"}, # ), # #", "True # Language code for this installation. All choices can", "= module exec(open(f, \"rb\").read()) #################### # DYNAMIC SETTINGS # ####################", "empty string is # used, will fall back to the", "local_settings.py which should be # ignored in your version control", "the settings documentation for a full list # of settings", "# ###################### # The following settings are already defined with", "The numeric mode to set newly-uploaded files to. The value", "# # Positional args for field class. # (_(\"Image\"),), #", "# SHOP_HANDLER_BILLING_SHIPPING = \\ # \"cartridge.shop.checkout.default_billship_handler\" # Dotted package path", "# A sequence of fields that will be injected into", "apps' \"static/\" subdirectories and in STATICFILES_DIRS. # Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT", "for types of product options, # eg Size, Colour. 
NOTE:", "doesn't appear in this setting, all pages will appear in", "in local_settings.py DEBUG = True # Whether a user's session", "settings are already defined in cartridge.shop.defaults # with default values,", "order is successful and all of the order # object's", "indexes from the SHOP_OPTION_TYPE_CHOICES setting that # control how the", "featured images for blog posts. Defaults to False. # #", "Please consult the settings # documentation for a full list", "(\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",), # ) # A sequence of templates", "to os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 ############# # DATABASES # #############", "all of the order # object's data has been created.", "set to ``True``, stack traces # are displayed for error", "name\"),), # {\"blank\": True, \"default\": 1}, # ), # )", "be set to ``False`` in # production. Best set to", "class to use for # the field. The third and", "), # # Example of adding a field to *all*", "name to import the root urlpatterns from for the project.", "set to ``True`` in local_settings.py DEBUG = True # Whether", "# MAIN DJANGO SETTINGS # ######################## # Hosts/domain names that", "been # defined so far, in order to provide some", "template path. # These templates are then available for selection", "files # in apps' \"static/\" subdirectories and in STATICFILES_DIRS. #", "MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module name to import the", "\"DIRS\": [ os.path.join(PROJECT_ROOT, \"templates\") ], \"OPTIONS\": { \"context_processors\": [ \"django.contrib.auth.context_processors.auth\",", "'django.db.backends.sqlite3', 'NAME': 'db.dev', } } ######### # PATHS # #########", "locale. SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\" # Dotted package path and name", "# URL that handles the media served from MEDIA_ROOT. 
Make", "# eg for \"Colour\" then \"Size\" given the above: #", "is a three item sequence, containing a unique ID #", "full list of settings Cartridge implements: # http://cartridge.jupo.org/configuration.html#default-settings # Sequence", "###################### # CARTRIDGE SETTINGS # ###################### # The following settings", "args for field class. # {\"blank\": True, \"upload_to\": \"blog\"}, #", "#################### # set_dynamic_settings() will rewrite globals based on what has", "to be imported # without Mezzanine installed, as the case", "# Keyword args for field class. # {\"blank\": True, \"upload_to\":", "and its field # name to be added, and the", "\"rb\").read()) #################### # DYNAMIC SETTINGS # #################### # set_dynamic_settings() will", "False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*'] # Local time", "the ordering and grouping of the admin menu. # #", "not all choices may be available on all operating systems.", "\"Size\" given the above: # SHOP_OPTION_ADMIN_ORDER = (2, 1) ######################", "will make some optimizations so as not # to load", "PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\" ######################### # OPTIONAL APPLICATIONS # ######################### #", "\"Colour\" then \"Size\" given the above: # SHOP_OPTION_ADMIN_ORDER = (2,", "same as your # system time zone. TIME_ZONE = 'UTC'", "Defaults to False. # SHOP_CATEGORY_USE_FEATURED_IMAGE = True # If True,", "without Mezzanine installed, as the case may be when using", "#################### # DYNAMIC SETTINGS # #################### # set_dynamic_settings() will rewrite", "values in # the ``defaults.py`` module within each of Mezzanine's", "(_(\"Media Library\"), \"media-library\"),)), # (_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\", # \"shop.Sale\",", "Defaults to False. 
# # BLOG_USE_FEATURED_IMAGE = True # If", "sequence of positional # args and a dictionary of keyword", "# Dotted path to field. # \"mezzanine.blog.models.BlogPost.image\", # # Dotted", "so far, in order to provide some better defaults where", "= ( # (1, \"Unprocessed\"), # (2, \"Processed\"), # )", "sequence, each containing a sequence of template tags # used", "this settings module to be imported # without Mezzanine installed,", "the # INSTALLED_APPS setting. USE_MODELTRANSLATION = False ######################## # MAIN", "``INSTALLED_APPS``, only if available. OPTIONAL_APPS = ( \"debug_toolbar\", \"django_extensions\", \"compressor\",", "library. If an empty string is # used, will fall", "These will be added to ``INSTALLED_APPS``, only if available. OPTIONAL_APPS", "True, the checkout process has a final confirmation step before", "\"mezzanine.blog.models.BlogPost.image\", # # Dotted path to field class. # \"somelib.fields.ImageField\",", "to the locale # module in the python standard library.", "function that # is called on submit of the billing/shipping", "sequence, containing a unique ID # for the template, a", "the admin, # eg for \"Colour\" then \"Size\" given the", "# (1, _(\"Top navigation bar\"), \"pages/menus/dropdown.html\"), # (2, _(\"Left-hand tree\"),", "\"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', ) if DJANGO_VERSION < (1, 10): MIDDLEWARE_CLASSES =", "store your static files # in apps' \"static/\" subdirectories and", "credit card types for payment. # SHOP_CARD_TYPES = (\"Mastercard\", \"Visa\",", "optimizations so as not # to load the internationalization machinery.", "\"http://example.com/media/\" MEDIA_URL = STATIC_URL + \"media/\" # Absolute filesystem path", "you'd pass directly to os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 ############# #", "classes to use. 
Order is important; in the request phase,", "to False, Django will make some optimizations so as not", "and all of the order # object's data has been", "template tag. Each # item in the sequence is a", "# is called once an order is successful and all", "moment we are using custom forks of them. PACKAGE_NAME_FILEBROWSER =", "in # the ``defaults.py`` module within each of Mezzanine's apps,", "has full access to everything defined in this module. #", "found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may", "turns on/off debug mode. When set to ``True``, stack traces", "imp.new_module(module_name) module.__file__ = f sys.modules[module_name] = module exec(open(f, \"rb\").read()) ####################", "system allowing for settings to be # defined per machine.", "for field class. # {\"blank\": True, \"upload_to\": \"blog\"}, # ),", "product options, # eg Size, Colour. NOTE: Increasing the number", "once an order is successful and all of the order", "TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [ os.path.join(PROJECT_ROOT, \"templates\")", "# 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\",", "USE_TZ = True # Language code for this installation. 
All", "also allow this settings module to be imported # without", "BLOG_USE_FEATURED_IMAGE = True # If True, the django-modeltranslation will be", "be imported # without Mezzanine installed, as the case may", "USE_MODELTRANSLATION = False ######################## # MAIN DJANGO SETTINGS # ########################", "TIME_ZONE = 'UTC' # If you set this to True,", "for this installation. All choices can be found here: #", "posts. Defaults to False. # # BLOG_USE_FEATURED_IMAGE = True #", "Local time zone for this installation. Choices can be found", "AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",) # The numeric mode to set newly-uploaded", "# is where shipping calculation can be performed and set", "here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be", "package path and name of the function that # is", "specifying the field class, the path # ``django.models.db.`` can be", "( ('en', _('English')), ) # A boolean that turns on/off", "SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\" # Dotted package path and name of", "absolute_import, unicode_literals import os from django import VERSION as DJANGO_VERSION", "path to field class. # \"somelib.fields.ImageField\", # # Positional args", "user's session cookie expires when the Web browser is closed.", "module to be imported # without Mezzanine installed, as the", "], \"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ], }, }, ]", "# is called on submit of the billing/shipping checkout step.", "# If you set this to False, Django will make", "here we set it to # the name of the", "# ######### # Full filesystem path to the project. PROJECT_APP_PATH", "yourself; store your static files # in apps' \"static/\" subdirectories", "Increasing the number of these will # require database migrations!", "######### # Full filesystem path to the project. 
PROJECT_APP_PATH =", "Positional args for field class. # (_(\"Image\"),), # # Keyword", "on featured images for shop categories. Defaults to False. #", "'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware',", "zone. TIME_ZONE = 'UTC' # If you set this to", "\"en\" # Supported languages LANGUAGES = ( ('en', _('English')), )", "= os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every cache", "\"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\", # \"mezzanine.accounts\", 'corsheaders', 'rest_framework', 'rest_framework_api_key', 'drf_yasg',", "_('English')), ) # A boolean that turns on/off debug mode.", "os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 ############# # DATABASES # ############# DATABASES", "# ) # Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES setting", "process is split into separate # billing/shipping and payment steps.", "SHOP_OPTION_TYPE_CHOICES setting that # control how the options should be", "( # ( # # Dotted path to field. #", "time zone. TIME_ZONE = 'UTC' # If you set this", "full access to everything defined in this module. # Also", "Best set to ``True`` in local_settings.py DEBUG = True #", "A sequence of templates used by the ``page_menu`` template tag.", "# The following settings are already defined with default values", "below isn't strictly # required. try: from mezzanine.utils.conf import set_dynamic_settings", "be implemented. 
# SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\" # Dotted package path", "1}, # ), # ) # Setting to turn on", "\"shop.DiscountCode\", # \"shop.Sale\", \"shop.Order\")), # (\"Site\", (\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")), #", "adding a field to *all* of Mezzanine's content types: #", "the root urlpatterns from for the project. ROOT_URLCONF = \"%s.urls\"", "the operating system. # If running in a Windows environment", "# Store these package names here as they may change", "- here we set it to # the name of", "# ) # A sequence of templates used by the", "\"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\",", "types: # ( # \"mezzanine.pages.models.Page.another_field\", # \"IntegerField\", # 'django.db.models.' is", "= os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if os.path.exists(f): import sys import imp module_name", "custom order # processing should be implemented. # SHOP_HANDLER_ORDER =", "fall back to the system's locale. SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\" #", "= (\"mezzanine.core.auth_backends.MezzanineBackend\",) # The numeric mode to set newly-uploaded files", "# # Dotted path to field class. # \"somelib.fields.ImageField\", #", "how the options should be ordered in the admin, #", "to try and use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX =", "path to the model and its field # name to", "# Language code for this installation. All choices can be", "'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.dev', } } ######### #", "consult the settings # documentation for a full list of", "# http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and grouping of the", "Language code for this installation. 
All choices can be found", "# Sequence of available credit card types for payment. #", "of monetary values accord to the locale # module in", "# object's data has been created. This is where any", "# (2, \"Processed\"), # ) # Sequence of value/name pairs", "True # Controls the formatting of monetary values accord to", "handles the media served from MEDIA_ROOT. Make sure to use", "item in the sequence is a three item sequence, containing", "SHOP_HANDLER_BILLING_SHIPPING = \\ # \"cartridge.shop.checkout.default_billship_handler\" # Dotted package path and", "closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1 # If you", "\"media-library\"),)), # (_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\", # \"shop.Sale\", \"shop.Order\")), #", "injected into Mezzanine's (or any # library's) models. Each item", "\"generic.ThreadedComment\", (_(\"Media Library\"), \"media-library\"),)), # (_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\", #", "# at the moment we are using custom forks of", "and in STATICFILES_DIRS. # Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\"))", "the dynamic settings below isn't strictly # required. 
try: from", "If you set this to True, Django will use timezone-aware", "The following settings are already defined in cartridge.shop.defaults # with", "unicode_literals import os from django import VERSION as DJANGO_VERSION from", ") # A three item sequence, each containing a sequence", "STATIC_URL.strip(\"/\")) # URL that handles the media served from MEDIA_ROOT.", "= \"grappelli_safe\" ######################### # OPTIONAL APPLICATIONS # ######################### # These", "################ INSTALLED_APPS = ( \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\",", "# SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\" # Dotted package path and name", "# BLOG_USE_FEATURED_IMAGE = True # If True, the django-modeltranslation will", "in a Windows environment this must be set to the", "timezone as the operating system. # If running in a", "# Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES setting that #", "= True # If True, the checkout process has a", "= False ######################## # MAIN DJANGO SETTINGS # ######################## #", "\"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL SETTINGS #", "\"mezzanine.pages.context_processors.page\", ], \"builtins\": [ \"mezzanine.template.loader_tags\", ], \"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\",", "sys.modules[module_name] = module exec(open(f, \"rb\").read()) #################### # DYNAMIC SETTINGS #", "to load the internationalization machinery. 
USE_I18N = False AUTHENTICATION_BACKENDS =", "# (_(\"Another name\"),), # {\"blank\": True, \"default\": 1}, # ),", "'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\",", "that are valid for this site; required if DEBUG is", "os.path.join(PROJECT_ROOT, STATIC_URL.strip(\"/\")) # URL that handles the media served from", "use. Order is important; in the request phase, # these", "these will # require database migrations! # SHOP_OPTION_TYPE_CHOICES = (", "\"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\", # \"mezzanine.accounts\",", "True # If True, the django-modeltranslation will be added to", "any # library's) models. Each item in the sequence is", "MIDDLEWARE = ( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment if using internationalisation", "# A sequence of templates used by the ``page_menu`` template", ") # List of middleware classes to use. Order is", "be defined in local_settings.py which should be # ignored in", "for payment. # SHOP_CARD_TYPES = (\"Mastercard\", \"Visa\", \"Diners\", \"Amex\") #", "python standard library. If an empty string is # used,", "admin menu. # # ADMIN_MENU_ORDER = ( # (\"Content\", (\"pages.Page\",", "field. # \"mezzanine.blog.models.BlogPost.image\", # # Dotted path to field class.", "internationalization machinery. 
USE_I18N = False AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",) # The", "Django model fields. # # EXTRA_MODEL_FIELDS = ( # (", "use timezone-aware datetimes. USE_TZ = True # Language code for", "same # timezone as the operating system. # If running", "\"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', ) if DJANGO_VERSION <", "name of the directory the project is in to try", "the dotted path to the field class to use for", "CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL prefix for static files. #", "# \"IntegerField\", # 'django.db.models.' is implied if path is omitted.", "that # local_settings has full access to everything defined in", "that handles the media served from MEDIA_ROOT. Make sure to", "order given, and in the # response phase the middleware", "# (1, \"Size\"), # (2, \"Colour\"), # ) # Sequence", "for \"Colour\" then \"Size\" given the above: # SHOP_OPTION_ADMIN_ORDER =", "to True, Django will use timezone-aware datetimes. USE_TZ = True", "(_(\"Another name\"),), # {\"blank\": True, \"default\": 1}, # ), #", "the model and its field # name to be added,", "be added to ``INSTALLED_APPS``, only if available. OPTIONAL_APPS = (", "(\"Users\", (\"auth.User\", \"auth.Group\",)), # ) # A three item sequence,", "is where shipping calculation can be performed and set using", "('en', _('English')), ) # A boolean that turns on/off debug", "'django.db.models.' is implied if path is omitted. # (_(\"Another name\"),),", "# local_settings has full access to everything defined in this", "\"static/\" subdirectories and in STATICFILES_DIRS. # Example: \"/home/media/media.lawrence.com/static/\" STATIC_ROOT =", "# field instance. 
When specifying the field class, the path", "as the case may be when using the # fabfile,", "# Setting to turn on featured images for blog posts.", "module within each of Mezzanine's apps, but are # common", "= os.path.join(PROJECT_ROOT, *MEDIA_URL.strip(\"/\").split(\"/\")) # Package/module name to import the root", "provide some better defaults where # applicable. We also allow", "of None will cause Django to use the same #", "object's data has been created. This is where any custom", "names that are valid for this site; required if DEBUG", "confirmation step before # completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION = True #", "# applicable. We also allow this settings module to be", "# If True, the checkout process has a final confirmation", "# SHOP_CHECKOUT_STEPS_CONFIRMATION = True # Controls the formatting of monetary", "used by the ``page_menu`` template tag. Each # item in", "phase the middleware will be applied in reverse order. MIDDLEWARE", "to be put here, commented out, for conveniently # overriding.", "a field to *all* of Mezzanine's content types: # (", "module in the python standard library. If an empty string", "in this directory yourself; store your static files # in", "LANGUAGE_CODE = \"en\" # Supported languages LANGUAGES = ( ('en',", "SHOP_CARD_TYPES = (\"Mastercard\", \"Visa\", \"Diners\", \"Amex\") # Setting to turn", "reverse order. MIDDLEWARE = ( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment if", "DJANGO_VERSION < (1, 10): MIDDLEWARE_CLASSES = MIDDLEWARE del MIDDLEWARE #", "to False. 
# SHOP_CATEGORY_USE_FEATURED_IMAGE = True # If True, the", "# (_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\", # \"shop.Sale\", \"shop.Order\")), # (\"Site\",", "and fourth items are a sequence of positional # args", "of adding a field to *all* of Mezzanine's content types:", "package names here as they may change in the future", "# name to be added, and the dotted path to", "into Mezzanine's (or any # library's) models. Each item in", "MIDDLEWARE_CLASSES = MIDDLEWARE del MIDDLEWARE # Store these package names", "# CARTRIDGE SETTINGS # ###################### # The following settings are", "'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\",", "is called on submit of the billing/shipping checkout step. This", "LANGUAGES = ( ('en', _('English')), ) # A boolean that", "args and a dictionary of keyword args, to use when", "the field class, the path # ``django.models.db.`` can be omitted", "defined in this module. # Also force into sys.modules so", "sequence of template tags # used to render the admin", "# ######################### # These will be added to ``INSTALLED_APPS``, only", "be added, and the dotted path to the field class", "to be put here, commented # out, for conveniently overriding.", "MEDIA_ROOT. Make sure to use a # trailing slash. #", "# EXTRA_MODEL_FIELDS = ( # ( # # Dotted path", "the sequence is a four item sequence. # The first", "of product options, # eg Size, Colour. 
NOTE: Increasing the", "some optimizations so as not # to load the internationalization", "be applied in reverse order. MIDDLEWARE = ( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware',", "# for the template, a label for the template, and", "imported # without Mezzanine installed, as the case may be", "(2, \"Processed\"), # ) # Sequence of value/name pairs for", "of indexes from the SHOP_OPTION_TYPE_CHOICES setting that # control how", "appear in. Note that if a menu template is used", "in the # response phase the middleware will be applied", "is in to try and use something # project specific.", "# Instead of doing \"from .local_settings import *\", we use", "found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = \"en\" # Supported languages", "Whether a user's session cookie expires when the Web browser", "the directory the project is in to try and use", "should be implemented. # SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\" # Dotted package", "featured images for shop categories. Defaults to False. # SHOP_CATEGORY_USE_FEATURED_IMAGE", "Order is important; in the request phase, # these middleware", "# (\"Content\", (\"pages.Page\", \"blog.BlogPost\", # \"generic.ThreadedComment\", (_(\"Media Library\"), \"media-library\"),)), #", "_(\"Top navigation bar\"), \"pages/menus/dropdown.html\"), # (2, _(\"Left-hand tree\"), \"pages/menus/tree.html\"), #", "function that # is called on submit of the payment", "of the function that # is called once an order", "\"auth.Group\",)), # ) # A three item sequence, each containing", "models. Each item in the sequence is a four item", "################## # Allow any settings to be defined in local_settings.py", "model and its field # name to be added, and", "# These templates are then available for selection when editing", ") # A sequence of templates used by the ``page_menu``", "steps. 
# SHOP_CHECKOUT_STEPS_SPLIT = True # If True, the checkout", "# library's) models. Each item in the sequence is a", "commented out, for conveniently # overriding. Please consult the settings", "# used to render the admin dashboard. # # DASHBOARD_TAGS", "# # DASHBOARD_TAGS = ( # (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",),", "model fields. # # EXTRA_MODEL_FIELDS = ( # ( #", "(\"Mastercard\", \"Visa\", \"Diners\", \"Amex\") # Setting to turn on featured", "(\"pages.Page\", \"blog.BlogPost\", # \"generic.ThreadedComment\", (_(\"Media Library\"), \"media-library\"),)), # (_(\"Shop\"), (\"shop.Product\",", "globals based on what has been # defined so far,", "billing/shipping checkout step. This # is where shipping calculation can", "STATIC_URL + \"media/\" # Absolute filesystem path to the directory", "directly to os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 ############# # DATABASES #", "``True`` in local_settings.py DEBUG = True # Whether a user's", "When set to ``True``, stack traces # are displayed for", "\"django.contrib.sitemaps\", \"django.contrib.staticfiles\", \"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\",", "implied if path is omitted. # (_(\"Another name\"),), # {\"blank\":", "here, commented # out, for conveniently overriding. Please consult the", "defined in local_settings.py which should be # ignored in your", "http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on", "# ################ INSTALLED_APPS = ( \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\",", "# ( # # Dotted path to field. 
# \"mezzanine.blog.models.BlogPost.image\",", "DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*'] #", "better defaults where # applicable. We also allow this settings", "= True # Whether a user's session cookie expires when", "# Controls the formatting of monetary values accord to the", "template is used # that doesn't appear in this setting,", "Dotted path to field. # \"mezzanine.blog.models.BlogPost.image\", # # Dotted path", "that # is called once an order is successful and", "], }, }, ] if DJANGO_VERSION < (1, 9): del", "this module. # Also force into sys.modules so it's visible", "_(\"Footer\"), \"pages/menus/footer.html\"), # ) # A sequence of fields that", "bar\"), \"pages/menus/dropdown.html\"), # (2, _(\"Left-hand tree\"), \"pages/menus/tree.html\"), # (3, _(\"Footer\"),", "# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available", "common enough to be put here, commented # out, for", "Keyword args for field class. # {\"blank\": True, \"upload_to\": \"blog\"},", "On Unix systems, a value of None will cause Django", "# args and a dictionary of keyword args, to use", "item sequence, containing a unique ID # for the template,", "PATHS # ######### # Full filesystem path to the project.", "control system allowing for settings to be # defined per", "load the internationalization machinery. USE_I18N = False AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",)", "from django.utils.translation import ugettext_lazy as _ SECRET_KEY = <KEY>' ######################", "# although not all choices may be available on all", "os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH) #", "the admin menu. 
# # ADMIN_MENU_ORDER = ( # (\"Content\",", "\"debug_toolbar\", \"django_extensions\", \"compressor\", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################## # LOCAL SETTINGS", "settings to be # defined per machine. # Instead of", "path. # These templates are then available for selection when", "Sequence of value/name pairs for types of product options, #", "True # If True, the checkout process has a final", "where setting the dynamic settings below isn't strictly # required.", "= True # If True, the django-modeltranslation will be added", "del MIDDLEWARE # Store these package names here as they", "use the same # timezone as the operating system. #", "# These will be added to ``INSTALLED_APPS``, only if available.", "\"mezzanine.boot\", \"mezzanine.conf\", \"mezzanine.core\", \"mezzanine.generic\", \"mezzanine.pages\", \"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\",", "enough to be put here, commented out, for conveniently #", "Controls the formatting of monetary values accord to the locale", "here, commented out, for conveniently # overriding. Please consult the", "PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT = BASE_DIR =", "module. 
# Also force into sys.modules so it's visible to", "may change in the future since # at the moment", "path to the directory static files should be collected to.", "'django.middleware.clickjacking.XFrameOptionsMiddleware', \"cartridge.shop.middleware.ShopMiddleware\", \"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', )", "( # (\"blog_tags.quick_blog\", \"mezzanine_tags.app_list\"), # (\"comment_tags.recent_comments\",), # (\"mezzanine_tags.recent_actions\",), # )", "all choices may be available on all operating systems. #", "\"cartridge.shop\", \"mezzanine.blog\", \"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\", # \"mezzanine.accounts\", 'corsheaders', 'rest_framework', 'rest_framework_api_key',", "it. # PAGE_MENU_TEMPLATES = ( # (1, _(\"Top navigation bar\"),", "PROJECT_APP TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [ os.path.join(PROJECT_ROOT,", "# documentation for a full list of settings Cartridge implements:", "# 'oauth2_provider', # 'rest_framework.authtoken', 'mezzanine_cartridge_api', ) # List of middleware", "unique ID # for the template, a label for the", "# SHOP_OPTION_ADMIN_ORDER = (2, 1) ###################### # MEZZANINE SETTINGS #", "of the order # object's data has been created. This", "four item sequence. 
# The first two items are the", "# # EXTRA_MODEL_FIELDS = ( # ( # # Dotted", "when editing which # menus a page should appear in.", "MIDDLEWARE del MIDDLEWARE # Store these package names here as", "Hosts/domain names that are valid for this site; required if", "[ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ], }, }, ] if DJANGO_VERSION", "\"Size\"), # (2, \"Colour\"), # ) # Sequence of indexes", "will be added to ``INSTALLED_APPS``, only if available. OPTIONAL_APPS =", "eg Size, Colour. NOTE: Increasing the number of these will", "of Mezzanine's apps, but are # common enough to be", "visible to Django's autoreload. f = os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if os.path.exists(f):", "should be # a mode you'd pass directly to os.chmod.", "formatting of monetary values accord to the locale # module", "in this module. # Also force into sys.modules so it's", "is a four item sequence. # The first two items", "``True``, stack traces # are displayed for error pages. Should", "\"django.template.backends.django.DjangoTemplates\", \"DIRS\": [ os.path.join(PROJECT_ROOT, \"templates\") ], \"OPTIONS\": { \"context_processors\": [", "files to. The value should be # a mode you'd", "[ os.path.join(PROJECT_ROOT, \"templates\") ], \"OPTIONS\": { \"context_processors\": [ \"django.contrib.auth.context_processors.auth\", \"django.contrib.messages.context_processors.messages\",", "statuses. # SHOP_ORDER_STATUS_CHOICES = ( # (1, \"Unprocessed\"), # (2,", ") # Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES setting that", "will be applied in reverse order. MIDDLEWARE = ( \"mezzanine.core.middleware.UpdateCacheMiddleware\",", "following settings are already defined with default values in #", "in reverse order. MIDDLEWARE = ( \"mezzanine.core.middleware.UpdateCacheMiddleware\", 'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment", "class. 
# \"somelib.fields.ImageField\", # # Positional args for field class.", "use for # the field. The third and fourth items", "be available on all operating systems. # On Unix systems,", "path to the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH)", "= (\"Mastercard\", \"Visa\", \"Diners\", \"Amex\") # Setting to turn on", "this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name #", "applied in the order given, and in the # response", "project. ROOT_URLCONF = \"%s.urls\" % PROJECT_APP TEMPLATES = [ {", "import sys import imp module_name = \"%s.local_settings\" % PROJECT_APP module", "Each item in the sequence is a four item sequence.", "will cause Django to use the same # timezone as", "MAIN DJANGO SETTINGS # ######################## # Hosts/domain names that are", "name to be added, and the dotted path to the", "# a mode you'd pass directly to os.chmod. FILE_UPLOAD_PERMISSIONS =", "1 # If you set this to False, Django will", "it to # the name of the directory the project", "filesystem path to the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP =", "# \"shop.Sale\", \"shop.Order\")), # (\"Site\", (\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")), # (\"Users\",", "all operating systems. # On Unix systems, a value of", "\"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', ) if DJANGO_VERSION < (1, 10):", "anything in this directory yourself; store your static files #", "sequence is a four item sequence. # The first two", "be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices", "process has a final confirmation step before # completion. 
#", "None will cause Django to use the same # timezone", "a payment gateway should be implemented. # SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\"", "Package/module name to import the root urlpatterns from for the", "# Dotted path to field class. # \"somelib.fields.ImageField\", # #", "sequence. # The first two items are the dotted path", "machinery. USE_I18N = False AUTHENTICATION_BACKENDS = (\"mezzanine.core.auth_backends.MezzanineBackend\",) # The numeric", "of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering", "setting the dynamic settings below isn't strictly # required. try:", "an empty string is # used, will fall back to", "migrations! # SHOP_OPTION_TYPE_CHOICES = ( # (1, \"Size\"), # (2,", "\"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', ) if DJANGO_VERSION < (1, 10): MIDDLEWARE_CLASSES", "the project is in to try and use something #", "sequence is a three item sequence, containing a unique ID", "\"pages/menus/tree.html\"), # (3, _(\"Footer\"), \"pages/menus/footer.html\"), # ) # A sequence", "defined in cartridge.shop.defaults # with default values, but are common", "= <KEY>' ###################### # CARTRIDGE SETTINGS # ###################### # The", "\"Unprocessed\"), # (2, \"Processed\"), # ) # Sequence of value/name", "f = os.path.join(PROJECT_APP_PATH, \"local_settings.py\") if os.path.exists(f): import sys import imp", "be when using the # fabfile, where setting the dynamic", "machine. # Instead of doing \"from .local_settings import *\", we", "menu template is used # that doesn't appear in this", "10): MIDDLEWARE_CLASSES = MIDDLEWARE del MIDDLEWARE # Store these package", "locale # module in the python standard library. 
If an", "# the ``defaults.py`` module within each of Mezzanine's apps, but", "a Windows environment this must be set to the same", "fields that will be injected into Mezzanine's (or any #", "# ( # \"mezzanine.pages.models.Page.another_field\", # \"IntegerField\", # 'django.db.models.' is implied", "= \"%s.urls\" % PROJECT_APP TEMPLATES = [ { \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",", "# {\"blank\": True, \"upload_to\": \"blog\"}, # ), # # Example", "# Example of adding a field to *all* of Mezzanine's", "Don't put anything in this directory yourself; store your static", "used, will fall back to the system's locale. SHOP_CURRENCY_LOCALE =", "We also allow this settings module to be imported #", "payment gateway should be implemented. # SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\" #", "APPLICATIONS # ######################### # These will be added to ``INSTALLED_APPS``,", "__future__ import absolute_import, unicode_literals import os from django import VERSION", "defined per machine. # Instead of doing \"from .local_settings import", "shop categories. Defaults to False. # SHOP_CATEGORY_USE_FEATURED_IMAGE = True #", "of these will # require database migrations! # SHOP_OPTION_TYPE_CHOICES =", "settings # documentation for a full list of settings Cartridge", "be collected to. # Don't put anything in this directory", "mode to set newly-uploaded files to. The value should be", "\"somelib.fields.ImageField\", # # Positional args for field class. # (_(\"Image\"),),", "the template, and the template path. # These templates are", "if DJANGO_VERSION < (1, 10): MIDDLEWARE_CLASSES = MIDDLEWARE del MIDDLEWARE", "to provide some better defaults where # applicable. We also", "to field class. # \"somelib.fields.ImageField\", # # Positional args for", "# module in the python standard library. If an empty", "# eg Size, Colour. 
NOTE: Increasing the number of these", "common enough to be put here, commented out, for conveniently", "installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although", "key will get prefixed with this value - here we", "documentation for a full list # of settings Mezzanine implements:", "that # control how the options should be ordered in", "a sequence of positional # args and a dictionary of", "served from MEDIA_ROOT. Make sure to use a # trailing", "the # fabfile, where setting the dynamic settings below isn't", "to turn on featured images for blog posts. Defaults to", "the field. The third and fourth items are a sequence", "here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = \"en\" # Supported languages LANGUAGES", "'django.contrib.sessions.middleware.SessionMiddleware', # Uncomment if using internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware',", "``False`` in # production. Best set to ``True`` in local_settings.py", "back to the system's locale. 
SHOP_CURRENCY_LOCALE = \"en_GB.UTF-8\" # Dotted", "= ( \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\", \"django.contrib.sessions\", \"django.contrib.sites\", \"django.contrib.sitemaps\", \"django.contrib.staticfiles\",", "Django will make some optimizations so as not # to", "\"cartridge.shop.checkout.default_billship_handler\" # Dotted package path and name of the function", "already defined in cartridge.shop.defaults # with default values, but are", "these package names here as they may change in the", "######### # PATHS # ######### # Full filesystem path to", "field class, the path # ``django.models.db.`` can be omitted for", "the checkout process is split into separate # billing/shipping and", "fourth items are a sequence of positional # args and", "important; in the request phase, # these middleware classes will", "# (3, _(\"Footer\"), \"pages/menus/footer.html\"), # ) # A sequence of", "= MIDDLEWARE del MIDDLEWARE # Store these package names here", "if os.path.exists(f): import sys import imp module_name = \"%s.local_settings\" %", "keyword args, to use when creating the # field instance.", "dotted path to the model and its field # name", "blog posts. Defaults to False. # # BLOG_USE_FEATURED_IMAGE = True", "debug mode. When set to ``True``, stack traces # are", "\"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ], \"builtins\": [ \"mezzanine.template.loader_tags\", ],", "standard library. If an empty string is # used, will", "+ \"media/\" # Absolute filesystem path to the directory that", "boolean that turns on/off debug mode. When set to ``True``,", "import os from django import VERSION as DJANGO_VERSION from django.utils.translation", "and set using the # function ``cartridge.shop.utils.set_shipping``. 
# SHOP_HANDLER_BILLING_SHIPPING =", "List of middleware classes to use. Order is important; in", "\"builtins\": [ \"mezzanine.template.loader_tags\", ], \"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\", ],", "options should be ordered in the admin, # eg for", "See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*'] # Local time zone for", "and in the # response phase the middleware will be", "will be injected into Mezzanine's (or any # library's) models.", "field class. # (_(\"Image\"),), # # Keyword args for field", "can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = \"en\" #", "# Sequence of value/name pairs for order statuses. # SHOP_ORDER_STATUS_CHOICES", "= \"en\" # Supported languages LANGUAGES = ( ('en', _('English')),", "# Positional args for field class. # (_(\"Image\"),), # #", "of doing \"from .local_settings import *\", we use exec so", "options, # eg Size, Colour. NOTE: Increasing the number of", "# # Example of adding a field to *all* of", "\"django.template.context_processors.static\", \"django.template.context_processors.media\", \"django.template.context_processors.request\", \"django.template.context_processors.tz\", \"mezzanine.conf.context_processors.settings\", \"mezzanine.pages.context_processors.page\", ], \"builtins\": [ \"mezzanine.template.loader_tags\",", "displayed for error pages. Should always be set to ``False``", "to field. # \"mezzanine.blog.models.BlogPost.image\", # # Dotted path to field", "SETTINGS # ################## # Allow any settings to be defined", "request phase, # these middleware classes will be applied in", "# out, for conveniently overriding. Please consult the settings #", "your # system time zone. 
TIME_ZONE = 'UTC' # If", "FILE_UPLOAD_PERMISSIONS = 0o644 ############# # DATABASES # ############# DATABASES =", "args for field class. # (_(\"Image\"),), # # Keyword args", "that # is called on submit of the payment checkout", "of the directory the project is in to try and", "# in apps' \"static/\" subdirectories and in STATICFILES_DIRS. # Example:", "CARTRIDGE SETTINGS # ###################### # The following settings are already", "# \"mezzanine.blog.models.BlogPost.image\", # # Dotted path to field class. #", "a value of None will cause Django to use the", "dotted path to the field class to use for #", "already defined with default values in # the ``defaults.py`` module", "internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware',", "These templates are then available for selection when editing which", "\"mezzanine.forms\", \"mezzanine.galleries\", \"mezzanine.twitter\", # \"mezzanine.accounts\", 'corsheaders', 'rest_framework', 'rest_framework_api_key', 'drf_yasg', #", "in the future since # at the moment we are", "here as they may change in the future since #", "field class. # {\"blank\": True, \"upload_to\": \"blog\"}, # ), #", "\"mezzanine.twitter\", # \"mezzanine.accounts\", 'corsheaders', 'rest_framework', 'rest_framework_api_key', 'drf_yasg', # 'oauth2_provider', #", ") if DJANGO_VERSION < (1, 10): MIDDLEWARE_CLASSES = MIDDLEWARE del", "set_dynamic_settings() will rewrite globals based on what has been #", "default values in # the ``defaults.py`` module within each of", "MEZZANINE SETTINGS # ###################### # The following settings are already", "choices may be available on all operating systems. # On", "may be available on all operating systems. 
# On Unix", "# common enough to be put here, commented out, for", "\"blog.BlogPost\", # \"generic.ThreadedComment\", (_(\"Media Library\"), \"media-library\"),)), # (_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\",", "admin, # eg for \"Colour\" then \"Size\" given the above:", "that will hold user-uploaded files. # Example: \"/home/media/media.lawrence.com/media/\" MEDIA_ROOT =", "allowing for settings to be # defined per machine. #", "# project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP # URL prefix for", "item sequence, each containing a sequence of template tags #", "of the function that # is called on submit of", "a page should appear in. Note that if a menu", "<KEY>' ###################### # CARTRIDGE SETTINGS # ###################### # The following", "be omitted for regular Django model fields. # # EXTRA_MODEL_FIELDS", "module exec(open(f, \"rb\").read()) #################### # DYNAMIC SETTINGS # #################### #", "# #################### # set_dynamic_settings() will rewrite globals based on what", "\"Colour\"), # ) # Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES", "isn't strictly # required. try: from mezzanine.utils.conf import set_dynamic_settings except", "to the project. PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_APP = os.path.basename(PROJECT_APP_PATH) PROJECT_ROOT", "SHOP_CHECKOUT_STEPS_CONFIRMATION = True # Controls the formatting of monetary values", "# DATABASES # ############# DATABASES = { 'default': { 'ENGINE':", "that will be injected into Mezzanine's (or any # library's)", "a three item sequence, containing a unique ID # for", "'rest_framework_api_key', 'drf_yasg', # 'oauth2_provider', # 'rest_framework.authtoken', 'mezzanine_cartridge_api', ) # List", "= imp.new_module(module_name) module.__file__ = f sys.modules[module_name] = module exec(open(f, \"rb\").read())", "to. 
The value should be # a mode you'd pass", "Every cache key will get prefixed with this value -", "\"http://media.lawrence.com/static/\" STATIC_URL = \"/static/\" # Absolute path to the directory", "to set newly-uploaded files to. The value should be #", "response phase the middleware will be applied in reverse order.", "database migrations! # SHOP_OPTION_TYPE_CHOICES = ( # (1, \"Size\"), #", "in cartridge.shop.defaults # with default values, but are common enough", "= 0o644 ############# # DATABASES # ############# DATABASES = {", "step. This # is where shipping calculation can be performed", "This # is where shipping calculation can be performed and", "should be implemented. # SHOP_HANDLER_PAYMENT = \"cartridge.shop.checkout.default_payment_handler\" # Sequence of", "a label for the template, and the template path. #", "future since # at the moment we are using custom", "\"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\" ######################### # OPTIONAL APPLICATIONS # #########################", "into separate # billing/shipping and payment steps. # SHOP_CHECKOUT_STEPS_SPLIT =", "# (1, \"Unprocessed\"), # (2, \"Processed\"), # ) # Sequence", "documentation for a full list of settings Cartridge implements: #", "but are common enough to be put here, commented #", "OPTIONAL APPLICATIONS # ######################### # These will be added to", "tree\"), \"pages/menus/tree.html\"), # (3, _(\"Footer\"), \"pages/menus/footer.html\"), # ) # A", "the settings # documentation for a full list of settings", "will appear in it. 
# PAGE_MENU_TEMPLATES = ( # (1,", "if DJANGO_VERSION < (1, 9): del TEMPLATES[0][\"OPTIONS\"][\"builtins\"] ################ # APPLICATIONS", "# ) # Sequence of value/name pairs for types of", "any settings to be defined in local_settings.py which should be", "# \"generic.ThreadedComment\", (_(\"Media Library\"), \"media-library\"),)), # (_(\"Shop\"), (\"shop.Product\", \"shop.ProductOption\", \"shop.DiscountCode\",", "be # defined per machine. # Instead of doing \"from", "Absolute path to the directory static files should be collected", "# (\"Users\", (\"auth.User\", \"auth.Group\",)), # ) # A three item", "APPLICATIONS # ################ INSTALLED_APPS = ( \"django.contrib.admin\", \"django.contrib.auth\", \"django.contrib.contenttypes\", \"django.contrib.redirects\",", "and name of the function that # is called on", "expires when the Web browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True", "custom forks of them. PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\"", "will use timezone-aware datetimes. USE_TZ = True # Language code", "settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and", "settings documentation for a full list # of settings Mezzanine", "for regular Django model fields. # # EXTRA_MODEL_FIELDS = (", "= BASE_DIR = os.path.dirname(PROJECT_APP_PATH) # Every cache key will get", "order # processing should be implemented. 
# SHOP_HANDLER_ORDER = \"cartridge.shop.checkout.default_order_handler\"", "to be added, and the dotted path to the field", "PROJECT_APP module = imp.new_module(module_name) module.__file__ = f sys.modules[module_name] = module", "Uncomment if using internationalisation or localisation # 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware',", "to the field class to use for # the field.", "# response phase the middleware will be applied in reverse", "systems, a value of None will cause Django to use", "exec so that # local_settings has full access to everything", "of the billing/shipping checkout step. This # is where shipping", "if a menu template is used # that doesn't appear", "\"mezzanine.core.request.CurrentRequestMiddleware\", \"mezzanine.core.middleware.RedirectFallbackMiddleware\", \"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware\", \"mezzanine.core.middleware.SitePermissionMiddleware\", \"mezzanine.pages.middleware.PageMiddleware\", \"mezzanine.core.middleware.FetchFromCacheMiddleware\", 'corsheaders.middleware.CorsMiddleware', ) if DJANGO_VERSION", "overriding. Please consult the settings # documentation for a full", "True # If True, the checkout process is split into", "Unix systems, a value of None will cause Django to", "module.__file__ = f sys.modules[module_name] = module exec(open(f, \"rb\").read()) #################### #", "{ \"BACKEND\": \"django.template.backends.django.DjangoTemplates\", \"DIRS\": [ os.path.join(PROJECT_ROOT, \"templates\") ], \"OPTIONS\": {", "\"pages/menus/footer.html\"), # ) # A sequence of fields that will", "https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*'] # Local time zone for this", "of template tags # used to render the admin dashboard.", "# (_(\"Image\"),), # # Keyword args for field class. #", "and a dictionary of keyword args, to use when creating", "field. 
The third and fourth items are a sequence of", "), # ) # Setting to turn on featured images", "three item sequence, containing a unique ID # for the", "is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*'] # Local", "# timezone as the operating system. # If running in", "where # applicable. We also allow this settings module to", "# Whether a user's session cookie expires when the Web", "settings to be defined in local_settings.py which should be #", "the payment checkout step. This is where # integration with", ") ################## # LOCAL SETTINGS # ################## # Allow any", "to use. Order is important; in the request phase, #", "to use the same # timezone as the operating system.", "\"/static/\" # Absolute path to the directory static files should", "If True, the checkout process has a final confirmation step", "# is called on submit of the payment checkout step.", "is split into separate # billing/shipping and payment steps. #", "a user's session cookie expires when the Web browser is", "(1, \"Unprocessed\"), # (2, \"Processed\"), # ) # Sequence of", "(1, \"Size\"), # (2, \"Colour\"), # ) # Sequence of", "# ) # A sequence of fields that will be", "the field class to use for # the field. The", "for order statuses. # SHOP_ORDER_STATUS_CHOICES = ( # (1, \"Unprocessed\"),", "This is where any custom order # processing should be", "for the template, a label for the template, and the", "when creating the # field instance. When specifying the field", "the # response phase the middleware will be applied in", "types of product options, # eg Size, Colour. NOTE: Increasing", "setting. 
USE_MODELTRANSLATION = False ######################## # MAIN DJANGO SETTINGS #", "prefixed with this value - here we set it to", "######################### # These will be added to ``INSTALLED_APPS``, only if", "1) ###################### # MEZZANINE SETTINGS # ###################### # The following", "the middleware will be applied in reverse order. MIDDLEWARE =", "has a final confirmation step before # completion. # SHOP_CHECKOUT_STEPS_CONFIRMATION", "this setting, all pages will appear in it. # PAGE_MENU_TEMPLATES", "name of the function that # is called once an", "code for this installation. All choices can be found here:", "pairs for types of product options, # eg Size, Colour.", "defaults where # applicable. We also allow this settings module", "of them. PACKAGE_NAME_FILEBROWSER = \"filebrowser_safe\" PACKAGE_NAME_GRAPPELLI = \"grappelli_safe\" ######################### #", "local_settings.py DEBUG = True # Whether a user's session cookie", "], \"builtins\": [ \"mezzanine.template.loader_tags\", ], \"loaders\": [ \"mezzanine.template.loaders.host_themes.Loader\", \"django.template.loaders.filesystem.Loader\", \"django.template.loaders.app_directories.Loader\",", "containing a unique ID # for the template, a label", "(\"sites.Site\", \"redirects.Redirect\", \"conf.Setting\")), # (\"Users\", (\"auth.User\", \"auth.Group\",)), # ) #" ]
[ "accepted = list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n'] for i in range(len(dataset)-1): #", "print(\"Dataset must be > 0\") matrix = [] for i", "in accepted and dataset[i] in accepted): if dataset[i] in accepted:", "in range(len(dataset)-1): # if (dataset[i+1] in accepted and dataset[i] in", "for o in consts.rang: matrix[i].append(0) dataset = dataset.lower() accepted =", "fill_matrix(dataset): assert type(dataset) == str assert len(dataset) > 0, print(\"Dataset", "= i+1 while (val2 < len(dataset) and not (dataset[val2] in", "dataset = dataset.lower() accepted = list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n'] for i", "if dataset[i] in accepted: val2 = i+1 while (val2 <", "accepted): if dataset[i] in accepted: val2 = i+1 while (val2", "matrix.append([]) for o in consts.rang: matrix[i].append(0) dataset = dataset.lower() accepted", "1 matrix = preprocessing.normalize(matrix, norm='l1') return matrix if __name__ ==", "numpy as np from sklearn import preprocessing def fill_matrix(dataset): assert", "== str assert len(dataset) > 0, print(\"Dataset must be >", "assert len(dataset) > 0, print(\"Dataset must be > 0\") matrix", "and dataset[i] in accepted): if dataset[i] in accepted: val2 =", "preprocessing.normalize(matrix, norm='l1') return matrix if __name__ == '__main__': print(fill_matrix(\"james as\"))", "not (dataset[val2] in accepted)): val2 += 1 ind1 = consts.get_ord(dataset[i])", "in accepted): if dataset[i] in accepted: val2 = i+1 while", "be > 0\") matrix = [] for i in consts.rang:", "consts.rang: matrix[i].append(0) dataset = dataset.lower() accepted = list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n']", "from wordgen import consts import numpy as np from sklearn", "+= 1 ind1 = consts.get_ord(dataset[i]) ind2 = consts.get_ord(dataset[val2]) matrix[ind2][ind1] +=", "> 0\") matrix = [] for i in consts.rang: matrix.append([])", "+= 1 matrix = preprocessing.normalize(matrix, norm='l1') return matrix if __name__", "consts.get_ord(dataset[i]) ind2 = 
consts.get_ord(dataset[val2]) matrix[ind2][ind1] += 1 matrix = preprocessing.normalize(matrix,", "and not (dataset[val2] in accepted)): val2 += 1 ind1 =", "for i in range(len(dataset)-1): # if (dataset[i+1] in accepted and", "in consts.rang: matrix[i].append(0) dataset = dataset.lower() accepted = list(\"abcdefghijklmnopqrstuvqwxyz\") +", "val2 = i+1 while (val2 < len(dataset) and not (dataset[val2]", "= list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n'] for i in range(len(dataset)-1): # if", "# if (dataset[i+1] in accepted and dataset[i] in accepted): if", "1 ind1 = consts.get_ord(dataset[i]) ind2 = consts.get_ord(dataset[val2]) matrix[ind2][ind1] += 1", "range(len(dataset)-1): # if (dataset[i+1] in accepted and dataset[i] in accepted):", "np from sklearn import preprocessing def fill_matrix(dataset): assert type(dataset) ==", "def fill_matrix(dataset): assert type(dataset) == str assert len(dataset) > 0,", "0\") matrix = [] for i in consts.rang: matrix.append([]) for", "in consts.rang: matrix.append([]) for o in consts.rang: matrix[i].append(0) dataset =", "> 0, print(\"Dataset must be > 0\") matrix = []", "dataset.lower() accepted = list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n'] for i in range(len(dataset)-1):", "consts.get_ord(dataset[val2]) matrix[ind2][ind1] += 1 matrix = preprocessing.normalize(matrix, norm='l1') return matrix", "str assert len(dataset) > 0, print(\"Dataset must be > 0\")", "(val2 < len(dataset) and not (dataset[val2] in accepted)): val2 +=", "= consts.get_ord(dataset[i]) ind2 = consts.get_ord(dataset[val2]) matrix[ind2][ind1] += 1 matrix =", "if (dataset[i+1] in accepted and dataset[i] in accepted): if dataset[i]", "for i in consts.rang: matrix.append([]) for o in consts.rang: matrix[i].append(0)", "(dataset[val2] in accepted)): val2 += 1 ind1 = consts.get_ord(dataset[i]) ind2", "as np from sklearn import preprocessing def fill_matrix(dataset): assert type(dataset)", "ind1 = consts.get_ord(dataset[i]) ind2 = consts.get_ord(dataset[val2]) 
matrix[ind2][ind1] += 1 matrix", "len(dataset) > 0, print(\"Dataset must be > 0\") matrix =", "from sklearn import preprocessing def fill_matrix(dataset): assert type(dataset) == str", "len(dataset) and not (dataset[val2] in accepted)): val2 += 1 ind1", "= preprocessing.normalize(matrix, norm='l1') return matrix if __name__ == '__main__': print(fill_matrix(\"james", "i in consts.rang: matrix.append([]) for o in consts.rang: matrix[i].append(0) dataset", "['\\n'] for i in range(len(dataset)-1): # if (dataset[i+1] in accepted", "dataset[i] in accepted: val2 = i+1 while (val2 < len(dataset)", "accepted)): val2 += 1 ind1 = consts.get_ord(dataset[i]) ind2 = consts.get_ord(dataset[val2])", "o in consts.rang: matrix[i].append(0) dataset = dataset.lower() accepted = list(\"abcdefghijklmnopqrstuvqwxyz\")", "import consts import numpy as np from sklearn import preprocessing", "import numpy as np from sklearn import preprocessing def fill_matrix(dataset):", "dataset[i] in accepted): if dataset[i] in accepted: val2 = i+1", "while (val2 < len(dataset) and not (dataset[val2] in accepted)): val2", "wordgen import consts import numpy as np from sklearn import", "matrix = [] for i in consts.rang: matrix.append([]) for o", "i in range(len(dataset)-1): # if (dataset[i+1] in accepted and dataset[i]", "consts.rang: matrix.append([]) for o in consts.rang: matrix[i].append(0) dataset = dataset.lower()", "sklearn import preprocessing def fill_matrix(dataset): assert type(dataset) == str assert", "0, print(\"Dataset must be > 0\") matrix = [] for", "consts import numpy as np from sklearn import preprocessing def", "assert type(dataset) == str assert len(dataset) > 0, print(\"Dataset must", "matrix = preprocessing.normalize(matrix, norm='l1') return matrix if __name__ == '__main__':", "< len(dataset) and not (dataset[val2] in accepted)): val2 += 1", "= [] for i in consts.rang: matrix.append([]) for o in", "in accepted)): val2 += 1 ind1 = consts.get_ord(dataset[i]) ind2 =", "in accepted: 
val2 = i+1 while (val2 < len(dataset) and", "type(dataset) == str assert len(dataset) > 0, print(\"Dataset must be", "= consts.get_ord(dataset[val2]) matrix[ind2][ind1] += 1 matrix = preprocessing.normalize(matrix, norm='l1') return", "must be > 0\") matrix = [] for i in", "ind2 = consts.get_ord(dataset[val2]) matrix[ind2][ind1] += 1 matrix = preprocessing.normalize(matrix, norm='l1')", "(dataset[i+1] in accepted and dataset[i] in accepted): if dataset[i] in", "<gh_stars>0 from wordgen import consts import numpy as np from", "list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n'] for i in range(len(dataset)-1): # if (dataset[i+1]", "matrix[i].append(0) dataset = dataset.lower() accepted = list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n'] for", "matrix[ind2][ind1] += 1 matrix = preprocessing.normalize(matrix, norm='l1') return matrix if", "= dataset.lower() accepted = list(\"abcdefghijklmnopqrstuvqwxyz\") + ['\\n'] for i in", "i+1 while (val2 < len(dataset) and not (dataset[val2] in accepted)):", "[] for i in consts.rang: matrix.append([]) for o in consts.rang:", "accepted: val2 = i+1 while (val2 < len(dataset) and not", "val2 += 1 ind1 = consts.get_ord(dataset[i]) ind2 = consts.get_ord(dataset[val2]) matrix[ind2][ind1]", "accepted and dataset[i] in accepted): if dataset[i] in accepted: val2", "preprocessing def fill_matrix(dataset): assert type(dataset) == str assert len(dataset) >", "+ ['\\n'] for i in range(len(dataset)-1): # if (dataset[i+1] in", "import preprocessing def fill_matrix(dataset): assert type(dataset) == str assert len(dataset)" ]
[ "mipmaps NEAREST_MIPMAP_LINEAR = 0x2702 #: Texture interpolation: Minification filter for", "function ONE = 0x0001 #: Blend function SRC_COLOR = 0x0300", "An estimate of the number of bits of subpixel resolution", "number of individual floating-point, integer, or boolean values that #:", "fragment shader source (optional) :param str geometry_shader: geometry shader source", "or 'stream' :rtype: :py:class:`~arcade.gl.Buffer` \"\"\" # create_with_size return Buffer(self, data,", "= deque() @property def window(self) -> Window: \"\"\" The window", "flags:: # Single flag ctx.enable(ctx.BLEND) # Multiple flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE)", "framebuffer. On context creation the window is the default render", "reserve=reserve, usage=usage) def framebuffer( self, *, color_attachments: Union[Texture, List[Texture]] =", "buffer( self, *, data: Optional[Any] = None, reserve: int =", "compat etc.) self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum required alignment for", "``context_gc``. 
\"\"\" # Loop the array until all objects are", "= gl.GL_CLAMP_TO_EDGE # Texture wrap mode: Clamp to border color", "vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #: Maximum number", "read by a geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS )", "from pyglet import gl from .buffer import Buffer from .program", "return self._point_size @point_size.setter def point_size(self, value: float): gl.glPointSize(self._point_size) self._point_size =", "located there out_attributes = [] # type: List[str] if not", "= 0x2700 #: Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_NEAREST", "all flags are disabled (enable no flags) ctx.enable_only() # Make", "sizes and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #: Value", "gl.glDisable(flag) def is_enabled(self, flag) -> bool: \"\"\" Check if a", "x direction :param GLenum wrap_y: How the texture wraps in", "will disable all other flags. This is a simple way", "The number of bytes reserve :param str usage: Buffer usage.", "to make up a single patch primitive. 
Patch primitives are", "mode: Repeat REPEAT = gl.GL_REPEAT # Texture wrap mode: Clamp", "geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #: Maximum number", "*, data=None) -> Texture: \"\"\"Create a 2D depth texture :param", "wrap mode: Clamp to border pixel CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE #", "# --- Store the most commonly used OpenGL constants #", "is_enabled(self, flag) -> bool: \"\"\" Check if a context flag", "the first units self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS - 1 # Detect", "#: Recommended maximum number of vertex array indices self.MAX_ELEMENTS_INDICES =", "0x8007 #: Blend equations: Maximum of source and destination MAX", "GL can handle self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum supported size", "from .framebuffer import Framebuffer, DefaultFrameBuffer from typing import Optional from", "protocol. :param int reserve: The number of bytes reserve :param", "MIN = 0x8007 #: Blend equations: Maximum of source and", "mode: Clamp to border pixel CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE # Texture", "disabled (enable no flags) ctx.enable_only() # Make sure only blending", "# States self._blend_func = self.BLEND_DEFAULT self._point_size = 1.0 self._flags: Set[int]", "\"\"\" Get the currently active framebuffer. This property is read-only", "if self.BLEND in self._flags: gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND) if self.DEPTH_TEST in", "geometry shader. :param str vertex_shader: vertex shader source :param str", "-> Texture: \"\"\"Create a 2D depth texture :param Tuple[int, int]", "window. 
:type: :py:class:`~arcade.Framebuffer` \"\"\" return self._screen @property def fbo(self) ->", "``GL_CLAMP_TO_BORDER`` Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying", "of your code base:: # Ensure all flags are disabled", "be at least 64 self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum number", "return self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod def activate(cls, ctx: \"Context\"): \"\"\"Mark a", "wrap mode: Repeat mirrored MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT # Flags #:", "2D Texture. Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying filters:", "texture :rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\" return Framebuffer( self, color_attachments=color_attachments, depth_attachment=depth_attachment )", "is enabled ctx.enable_only(ctx.BLEND) # Make sure only depth test and", "the ``gl`` module. (``ctx.BLEND`` or ``arcade.gl.BLEND``). \"\"\" #: The active", ") source_te = ( ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader else None", "= self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #: maximum number of individual 4-vectors", "str = None, defines: Dict[str, str] = None ) ->", "on textures to avoid # affecting currently bound textures in", "the current context. self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION) self.VENDOR = self.get_str(gl.GL_VENDOR) self.RENDERER", "raise ValueError(\"Unsupported gc_mode. 
Supported modes are:\", modes) self._gc_mode = value", "default (what we expect in python) self._gc_mode = \"auto\" self.gc_mode", "from the vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #:", "be written in a fragment shader self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS) #:", "\"\"\" Disable one or more context flags:: # Single flag", "be used to access texture maps from the geometry shader", "function ONE_MINUS_DST_ALPHA = 0x0305 #: Blend function DST_COLOR = 0x0306", "data=None) -> Texture: \"\"\"Create a 2D depth texture :param Tuple[int,", "PATCHES = gl.GL_PATCHES # The most common error enums _errors", "None, out_attributes=out_attributes, ) def query(self): \"\"\" Create a query object", "or depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum number of", "# Single flag ctx.disable(ctx.BLEND) # Multiple flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\"", "flag in args: gl.glEnable(flag) def enable_only(self, *args): \"\"\" Enable only", "equations: destination - source FUNC_REVERSE_SUBTRACT = 0x800B #: Blend equations:", "can #: be held in uniform variable storage for a", ":param List[arcade.gl.Texture] color_attachments: List of textures we want to render", "size: Tuple[int, int], *, components: int = 4, dtype: str", "self._gc_mode @gc_mode.setter def gc_mode(self, value: str): modes = [\"auto\", \"context_gc\"]", "more context flags:: # Single flag ctx.enable(ctx.BLEND) # Multiple flags", "= 0x0302 #: Blend function ONE_MINUS_SRC_ALPHA = 0x0303 #: Blend", "of the occurring error or ``None`` of no errors has", "Program from .vertex_array import Geometry, VertexArray from .framebuffer import Framebuffer,", "2: RG, 3: RGB, 4: RGBA) :param str dtype: The", "c_int() gl.glGetIntegerv(enum, value) return value.value def get_float(self, enum) -> float:", "default framebuffer self._screen = DefaultFrameBuffer(self) # Tracking active 
program self.active_program:", "\"\"\" return Geometry(self, content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size) def program( self,", "\"\"\" return self._gl_version def gc(self): \"\"\" Run garbage collection of", "the screen OpenGL should render to. Normally it would be", "shader. :param str vertex_shader: vertex shader source :param str fragment_shader:", "( ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader else None ) # If", "getattr(self, key) setattr(self, key, (created, freed + 1)) class Limits:", "set to the same value as the viewport # to", "= source_geo.out_attributes else: out_attributes = source_vs.out_attributes return Program( self, vertex_shader=source_vs.get_source(defines=defines),", "LOG.debug( \"%s allocations passed threshold (%s) [created = %s] [freed", "\"\"\"OpenGL Limitations\"\"\" def __init__(self, ctx): self._ctx = ctx #: Minor", "access texture maps from the vertex shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get(", "= self.get(gl.GL_MAX_SAMPLES) #: A rough estimate of the largest rectangular", "LINES = gl.GL_LINES # 1 #: Primitive mode LINE_STRIP =", "gl.GL_FRAGMENT_SHADER) if fragment_shader else None ) source_geo = ( ShaderSource(geometry_shader,", "This is only needed when ``gc_mode`` is ``context_gc``. 
\"\"\" #", "a rough estimate of the largest texture that the GL", "vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #: Maximum number", "number of words for fragment shader uniform variables in all", "source :param str fragment_shader: fragment shader source (optional) :param str", "ContextStats = ContextStats(warn_threshold=1000) # Hardcoded states # This should always", "that may be written in a fragment shader self.MAX_DRAW_BUFFERS =", "to avoid background color affecting areas outside the viewport gl.glEnable(gl.GL_SCISSOR_TEST)", "estimate of the largest rectangular texture that the GL can", ".framebuffer import Framebuffer, DefaultFrameBuffer from typing import Optional from .texture", "data: Any = None, wrap_x: gl.GLenum = None, wrap_y: gl.GLenum", "# Tracking active framebuffer. On context creation the window is", "\"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW:", "\"Context\"): \"\"\"Mark a context as the currently active one\"\"\" cls.active", "OpenGL rendering commands are completed\"\"\" gl.glFinish() # --- Resource methods", "(optional). Can be bytes or an object supporting the buffer", "# When a geometry shader is present the out attributes", "geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #: Maximum number", "integer format multisample buffers self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum samples", "the viewport # to avoid background color affecting areas outside", "warn warn(\"Error happened while querying of limits. 
Moving on ..\")", "number of vertex array vertices self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum", "Texture interpolation: Linear interpolate LINEAR = 0x2601 #: Texture interpolation:", "in all uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #:", "#: can be held in uniform variable storage for a", "self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #: Maximum supported texture image units that", "def buffer( self, *, data: Optional[Any] = None, reserve: int", "commonly used enums. All enums also exist in the ``gl``", "if tess_control_shader else None ) source_te = ( ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER)", "-> Framebuffer: \"\"\"Create a Framebuffer. :param List[arcade.gl.Texture] color_attachments: List of", "belongs to a ``pyglet.Window`` normally accessed through ``window.ctx``. The Context", ":type: bool \"\"\" return flag in self._flags @property def viewport(self)", "#: Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_LINEAR = 0x2702", "are consumed by the tessellation control shader (if present) and", "\"\"\"Check OpenGL error Returns a string representation of the occurring", "self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum number of texels allowed in", "a geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum", "uniform variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get(", "from typing import Optional from .texture import Texture from .query", "alignment for uniform buffer sizes and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get(", "#: Blend equations: destination - source FUNC_REVERSE_SUBTRACT = 0x800B #:", "for the window. :type: :py:class:`~arcade.Framebuffer` \"\"\" return self._screen @property def", "threshold (%s) [created = %s] [freed = %s] [active =", "some flags. 
This will disable all other flags. This is", "\"\"\" return self._window_ref() @property def screen(self) -> Framebuffer: \"\"\" The", "How the texture wraps in x direction :param GLenum wrap_y:", "shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #: Maximum number of", "TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY # 13 #: Patch mode (tessellation) PATCHES", "gl.GLenum = None, filter: Tuple[gl.GLenum, gl.GLenum] = None ) ->", "is enabled :type: bool \"\"\" return flag in self._flags @property", "RuntimeError(\"OpenGL error: {err}\") :type: str \"\"\" err = gl.glGetError() if", "*, data: Optional[Any] = None, reserve: int = 0, usage:", "gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #: Maximum number of individual floating-point, integer, or", "int): raise TypeError(\"patch_vertices must be an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property", "base:: # Ensure all flags are disabled (enable no flags)", "Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``,", "source_te else None, out_attributes=out_attributes, ) def query(self): \"\"\" Create a", "self._screen = DefaultFrameBuffer(self) # Tracking active program self.active_program: Optional[Program] =", "uniform buffer sizes and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT )", "fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum number of uniform", "freed, created - freed, ) def decr(self, key): created, freed", "resources, global states and commonly used enums. 
All enums also", "self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum number of components of the inputs read", "None, mode: int = None, index_element_size: int = 4, ):", "by a vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum number", "Tuple[int, int]: \"\"\" Get or the blend function:: ctx.blend_func =", "Returns a string representation of the occurring error or ``None``", "the texel array of a texture buffer object self.MAX_TEXTURE_BUFFER_SIZE =", "of individual floating-point, integer, or boolean values that #: can", "are disabled (enable no flags) ctx.enable_only() # Make sure only", "@primitive_restart_index.setter def primitive_restart_index(self, value: int): self._primitive_restart_index = value gl.glPrimitiveRestartIndex(value) def", "self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum number of sample mask words", "direction :param Tuple[GLenum,GLenum] filter: Minification and magnification filter \"\"\" return", "ONE_MINUS_SRC_COLOR = 0x0301 #: Blend function SRC_ALPHA = 0x0302 #:", "of the OpenGL API supported by the current context. self.MAJOR_VERSION", "shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #: maximum number of", "\"\"\" # Loop the array until all objects are gone.", "a fragment shader we are doing transform feedback. 
# When", "equations: Minimum of source and destination MIN = 0x8007 #:", "maximum number of vertex array vertices self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES) #:", "# self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO: Missing", "default blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT = 0x0302, 0x0303 #:", "DefaultFrameBuffer from typing import Optional from .texture import Texture from", "#: Texture interpolation: Nearest pixel NEAREST = 0x2600 #: Texture", "context flag states are not lingering from other sections of", "would be the size of the window's framebuffer:: # 4:3", "handle. The value must be at least 64 self.MAX_3D_TEXTURE_SIZE =", "get_str(self, enum: gl.GLenum) -> str: \"\"\"Get a string limit\"\"\" return", "gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE in self._flags: gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE)", "import Optional from .texture import Texture from .query import Query", "resources. Supported modes are: # default: Auto ctx.gc_mode = \"auto\"", "not isinstance(value, int): raise TypeError(\"patch_vertices must be an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES,", "Make sure only depth test and culling is enabled ctx.enable_only(ctx.DEPTH_TEST,", "context flags:: # Single flag ctx.enable(ctx.BLEND) # Multiple flags ctx.enable(ctx.DEPTH_TEST,", "= (0, 0) self.buffer = (0, 0) self.program = (0,", "draw mode (optional) :param int index_element_size: Byte size of the", "set number of vertices that will be used to make", "def geometry( self, content: Optional[Sequence[BufferDescription]] = None, index_buffer: Buffer =", "size of the index buffer type. 
Can be 1, 2", "flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags.update(args) for flag in args: gl.glEnable(flag)", "= %s] [active = %s]\", key, self.warn_threshold, created, freed, created", "Maximum supported texture image units that can be used to", "number of texels allowed in the texel array of a", "filter: Tuple[gl.GLenum, gl.GLenum] = None ) -> Texture: \"\"\"Create a", "len(self.objects): obj = self.objects.pop() obj.delete() @property def gc_mode(self) -> str:", "enabled ctx.enable_only(ctx.BLEND) # Make sure only depth test and culling", "gc(self): \"\"\" Run garbage collection of OpenGL objects for this", "bits of subpixel resolution #: that are used to position", "finish(self) -> None: \"\"\"Wait until all OpenGL rendering commands are", "Blend function SRC_ALPHA = 0x0302 #: Blend function ONE_MINUS_SRC_ALPHA =", "gl.GL_LINE_STRIP_ADJACENCY # 11 #: Primitive mode TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY #", "wrap mode: Clamp to border color CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER #", "\"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", } def __init__(self, window: pyglet.window.Window, gc_mode: str", "Maximum number of components of inputs read by a geometry", "gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self, *args): \"\"\" Disable one or", "import deque import logging import weakref from typing import Any,", "logging import weakref from typing import Any, Dict, List, Tuple,", "estimate of the number of bits of subpixel resolution #:", "Using the current framebuffer size ctx.viewport = 0, 0, *ctx.screen.size", "States self._blend_func = self.BLEND_DEFAULT self._point_size = 1.0 self._flags: Set[int] =", "geometry_shader=source_geo.get_source(defines=defines) if source_geo else None, tess_control_shader=source_tc.get_source(defines=defines) if source_tc else None,", "__init__(self, warn_threshold=100): self.warn_threshold = warn_threshold # (created, freed) 
self.texture =", "in an array texture, and must be at least 256", "warnings import warn warn(\"Error happened while querying of limits. Moving", "typing import Any, Dict, List, Tuple, Union, Sequence, Set import", "*, color_attachments: Union[Texture, List[Texture]] = None, depth_attachment: Texture = None", "simply describes what pixels of the screen OpenGL should render", "0x8008 # Blend mode shortcuts #: Blend mode shortcut for", "LOG = logging.getLogger(__name__) class Context: \"\"\" Represents an OpenGL context.", "#: Maximum number of samples in a color multisample texture", "\"\"\" return Framebuffer( self, color_attachments=color_attachments, depth_attachment=depth_attachment ) def texture( self,", "self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum number of uniform buffer binding", "*args): \"\"\" Enable only some flags. This will disable all", "cls.active = ctx def enable(self, *args): \"\"\" Enables one or", "deque() @property def window(self) -> Window: \"\"\" The window this", "be ``bytes`` or an object supporting the buffer protocol. 
:param", "None, index_buffer: Buffer = None, mode: int = None, index_element_size:", ") source_tc = ( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if tess_control_shader else None", "out_attributes=out_attributes, ) def query(self): \"\"\" Create a query object for", "= gl.GL_LINE_STRIP_ADJACENCY # 11 #: Primitive mode TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY", "gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\",", "--- def buffer( self, *, data: Optional[Any] = None, reserve:", "of individual 4-vectors of floating-point, integer, #: or boolean values", "supported by the current context self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION) #: Major", "attributes accessible to a vertex shader. self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS) #:", "integer, or boolean values that #: can be held in", "# 6 #: Primitive mode LINES_ADJACENCY = gl.GL_LINES_ADJACENCY # 10", "= ctx def enable(self, *args): \"\"\" Enables one or more", "gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform blocks per program", "to a vertex shader. 
self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum supported", "The OpenGL version as a 2 component tuple :type: tuple", "self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum supported texture image units that", "isinstance(value, int): raise TypeError(\"patch_vertices must be an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value)", "TypeError(\"patch_vertices must be an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property def point_size(self)", "simultaneous outputs that may be written in a fragment shader", "self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS - 1 # Detect the default framebuffer", "modes are:\", modes) self._gc_mode = value @property def error(self) ->", "= self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The number 4-vectors for varying variables self.MAX_VARYING_VECTORS", "``pyglet.Window`` \"\"\" return self._window_ref() @property def screen(self) -> Framebuffer: \"\"\"", "#: Minimum required alignment for uniform buffer sizes and offset", "shader self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum number of components of", "self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err = self._ctx.error if err: from warnings", "of the texture :param Any data: The texture data (optional).", "f2, f4 / i1, i2, i4 / u1, u2, u4", "components of output written by a vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS =", "Buffer index_buffer: Index/element buffer (optional) :param int mode: The default", "pixel CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE # Texture wrap mode: Clamp to", "-> Framebuffer: \"\"\" Get the currently active framebuffer. This property", "Tuple[int, int, int, int]: \"\"\" Get or set the viewport", "A rough estimate of the largest cube-map texture that the", ".texture import Texture from .query import Query from .glsl import", "supporting the buffer protocol. 
:param GLenum wrap_x: How the texture", "return self._screen @property def fbo(self) -> Framebuffer: \"\"\" Get the", ") #: Maximum number of individual floating-point, integer, or boolean", "Set import pyglet from pyglet.window import Window from pyglet import", "context creation the window is the default render target self.active_framebuffer:", "variables self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum number of 4-component generic", "self.objects = deque() @property def window(self) -> Window: \"\"\" The", "-> str: \"\"\" Set the garbage collection mode for OpenGL", "number of uniform buffer binding points on the context self.MAX_UNIFORM_BUFFER_BINDINGS", "modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``,", "filters: ``GL_NEAREST``, ``GL_LINEAR`` :param Tuple[int, int] size: The size of", "depth_attachment=depth_attachment ) def texture( self, size: Tuple[int, int], *, components:", "required alignment for uniform buffer sizes and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT =", "__init__(self, window: pyglet.window.Window, gc_mode: str = \"auto\"): self._window_ref = weakref.ref(window)", "-> Union[str, None]: \"\"\"Check OpenGL error Returns a string representation", "texture image units that can be used to access texture", "of the index buffer type. 
Can be 1, 2 or", "Minor version number of the OpenGL API supported by the", "be the size of the window's framebuffer:: # 4:3 screen", "mode TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP # 5 #: Primitive mode TRIANGLE_FAN", "err: raise RuntimeError(\"OpenGL error: {err}\") :type: str \"\"\" err =", "def primitive_restart_index(self) -> int: \"\"\"Get or set the primitive restart", "integer, or boolean values that can #: be held in", "framebuffer self._screen = DefaultFrameBuffer(self) # Tracking active program self.active_program: Optional[Program]", "= gc_mode #: Collected objects to gc when gc_mode is", "deque import logging import weakref from typing import Any, Dict,", "# 12 #: Primitive mode TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY # 13", "gl.GL_VERTEX_SHADER) source_fs = ( ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if fragment_shader else None", "= self.get(gl.GL_MAJOR_VERSION) self.VENDOR = self.get_str(gl.GL_VENDOR) self.RENDERER = self.get_str(gl.GL_RENDERER) #: Value", "components of outputs written by a geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS =", "flag states are not lingering from other sections of your", "collection mode for OpenGL resources. Supported modes are: # default:", "LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY # 11 #: Primitive mode TRIANGLES_ADJACENCY =", "mode: Clamp to border color CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER # Texture", "return self._primitive_restart_index @primitive_restart_index.setter def primitive_restart_index(self, value: int): self._primitive_restart_index = value", "to gc when gc_mode is \"context_gc\" self.objects = deque() @property", "OpenGL should render to. Normally it would be the size", "mode POINTS = gl.GL_POINTS # 0 #: Primitive mode LINES", "Supported modes are: # default: Auto ctx.gc_mode = \"auto\" \"\"\"", "this context. 
This is only needed when ``gc_mode`` is ``context_gc``.", "CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE # Texture wrap mode: Clamp to border", "# Normal garbage collection as default (what we expect in", "gl.GLenum) -> str: \"\"\"Get a string limit\"\"\" return cast(gl.glGetString(enum), c_char_p).value.decode()", "of vertex array indices self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended maximum", "1.0 self._flags: Set[int] = set() # Normal garbage collection as", "``pyglet.Window`` normally accessed through ``window.ctx``. The Context class contains methods", "Ensure all flags are disabled (enable no flags) ctx.enable_only() #", "gl_version(self) -> Tuple[int, int]: \"\"\" The OpenGL version as a", "mode shortcut for premultipled alpha: ``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA = 0x0302,", "Default is -1\"\"\" return self._primitive_restart_index @primitive_restart_index.setter def primitive_restart_index(self, value: int):", "@property def screen(self) -> Framebuffer: \"\"\" The framebuffer for the", "Tuple[int, int] size: The size of the texture :param int", "0x0000 #: Blend function ONE = 0x0001 #: Blend function", "class ContextStats: def __init__(self, warn_threshold=100): self.warn_threshold = warn_threshold # (created,", "Context flag: Depth testing DEPTH_TEST = gl.GL_DEPTH_TEST #: Context flag:", "error or ``None`` of no errors has occurred. Example:: err", "no flags) ctx.enable_only() # Make sure only blending is enabled", "largest cube-map texture that the GL can handle self.MAX_CUBE_MAP_TEXTURE_SIZE =", "patch_vertices(self) -> int: \"\"\" Get or set number of vertices", "screen OpenGL should render to. 
Normally it would be the", "BLEND_ADDITIVE = 0x0001, 0x0001 #: Blend mode shortcut for premultipled", "into :param arcade.gl.Texture depth_attachment: Depth texture :rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\" return", "representation of the occurring error or ``None`` of no errors", "gl.GL_CLAMP_TO_BORDER # Texture wrap mode: Repeat mirrored MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT", "value: int): if not isinstance(value, int): raise TypeError(\"patch_vertices must be", "``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``,", "NEAREST_MIPMAP_LINEAR = 0x2702 #: Texture interpolation: Minification filter for mipmaps", "else None, tess_evaluation_shader=source_te.get_source(defines=defines) if source_te else None, out_attributes=out_attributes, ) def", "number of simultaneous outputs that may be written in a", "collections import deque import logging import weakref from typing import", "are gone. 
# Deleting one object might add new ones", "tuple (src, dst) \"\"\" return self._blend_func @blend_func.setter def blend_func(self, value:", "filter \"\"\" return Texture( self, size, components=components, data=data, dtype=dtype, wrap_x=wrap_x,", "int] size: The size of the texture :param int components:", "source_tc else None, tess_evaluation_shader=source_te.get_source(defines=defines) if source_te else None, out_attributes=out_attributes, )", "geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum number", "(created, freed + 1)) class Limits: \"\"\"OpenGL Limitations\"\"\" def __init__(self,", "integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property def point_size(self) -> float: \"\"\"float: Get", "gl.glPrimitiveRestartIndex(value) def finish(self) -> None: \"\"\"Wait until all OpenGL rendering", "pyglet.window.Window, gc_mode: str = \"auto\"): self._window_ref = weakref.ref(window) self.limits =", "texture that the GL can handle self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #:", "self.active_framebuffer: Framebuffer = self._screen self.stats: ContextStats = ContextStats(warn_threshold=1000) # Hardcoded", "Tuple[int, int, int, int]): self.active_framebuffer.viewport = value @property def blend_func(self)", "/ u1, u2, u4 :param Any data: The texture data", "if err: raise RuntimeError(\"OpenGL error: {err}\") :type: str \"\"\" err", "key): created, freed = getattr(self, key) setattr(self, key, (created +", "restart index to -1 by default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index = -1", "#: Value indicates the maximum number of layers allowed in", ":param GLenum wrap_y: How the texture wraps in y direction", "# Deleting one object might add new ones so we", "Depth texture :rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\" return Framebuffer( self, color_attachments=color_attachments, depth_attachment=depth_attachment", "Missing in 
pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err = self._ctx.error", "is present the out attributes will be located there out_attributes", "#: Primitive mode TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY # 13 #: Patch", ":type: ``pyglet.Window`` \"\"\" return self._window_ref() @property def screen(self) -> Framebuffer:", "None, defines: Dict[str, str] = None ) -> Program: \"\"\"Create", "\"\"\"Create a Framebuffer. :param List[arcade.gl.Texture] color_attachments: List of textures we", "source (optional) :param str geometry_shader: geometry shader source (optional) :param", "enable_only(self, *args): \"\"\" Enable only some flags. This will disable", "API supported by the current context self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION) #:", "per program self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number of words for", "the default render target self.active_framebuffer: Framebuffer = self._screen self.stats: ContextStats", "from .types import BufferDescription LOG = logging.getLogger(__name__) class Context: \"\"\"", "BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001 # VertexArray: Primitives #: Primitive mode", "#: Maximum number of components of inputs read by a", "renderbuffers self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum number of sample mask", "str = \"auto\"): self._window_ref = weakref.ref(window) self.limits = Limits(self) self._gl_version", "until all objects are gone. # Deleting one object might", "OpenGL Buffer object. :param Any data: The buffer data, This", "transform feedback. 
# When a geometry shader is present the", "(optional) :param int mode: The default draw mode (optional) :param", "800, 600 # 1080p ctx.viewport = 0, 0, 1920, 1080", "freed, ) def decr(self, key): created, freed = getattr(self, key)", "uniform variable storage for the vertex shader self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS)", "\"context_gc\"] if value not in modes: raise ValueError(\"Unsupported gc_mode. Supported", "or boolean values that can be held in uniform variable", "def framebuffer( self, *, color_attachments: Union[Texture, List[Texture]] = None, depth_attachment:", "for a fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #:", "from pyglet.window import Window from pyglet import gl from .buffer", "Set primitive restart index to -1 by default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index", "self._screen @property def fbo(self) -> Framebuffer: \"\"\" Get the currently", "have a fragment shader we are doing transform feedback. #", ":type: :py:class:`arcade.gl.Framebuffer` \"\"\" return self.active_framebuffer @property def gl_version(self) -> Tuple[int,", "source_tc = ( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if tess_control_shader else None )", "of samples in a multisample depth or depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES", "*, components: int = 4, dtype: str = \"f1\", data:", "#: Blend function ONE_MINUS_SRC_ALPHA = 0x0303 #: Blend function DST_ALPHA", "Maximum number of 4-vectors that may be held in uniform", "0, 0, 800, 600 # 1080p ctx.viewport = 0, 0,", "#: A mask value indicating what context profile is used", "context self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION) #: Major version number of the", "Context: \"\"\" Represents an OpenGL context. This context belongs to", "framebuffer for the window. 
:type: :py:class:`~arcade.Framebuffer` \"\"\" return self._screen @property", "self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum number of samples in a multisample depth", "not in modes: raise ValueError(\"Unsupported gc_mode. Supported modes are:\", modes)", "in a multisample depth or depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES)", "ones so we need while len(self.objects): obj = self.objects.pop() obj.delete()", "usage. 'static', 'dynamic' or 'stream' :rtype: :py:class:`~arcade.gl.Buffer` \"\"\" # create_with_size", "used enums. All enums also exist in the ``gl`` module.", "lingering from other sections of your code base:: # Ensure", "y, width, height) \"\"\" return self.active_framebuffer.viewport @viewport.setter def viewport(self, value:", "the currently active one\"\"\" cls.active = ctx def enable(self, *args):", "and subsequently used for tessellation. :type: int \"\"\" value =", "shader source (optional) :param dict defines: Substitute #defines values in", "0, 0, 1920, 1080 # Using the current framebuffer size", "= self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum size in basic machine units of", "self.warn_threshold == 0 and created > 0: LOG.debug( \"%s allocations", "an integer limit\"\"\" value = c_int() gl.glGetIntegerv(enum, value) return value.value", "# affecting currently bound textures in the first units self.default_texture_unit", "A rough estimate of the largest rectangular texture that the", "in y direction :param Tuple[GLenum,GLenum] filter: Minification and magnification filter", "self.get_str(gl.GL_VENDOR) self.RENDERER = self.get_str(gl.GL_RENDERER) #: Value indicating the number of", "self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err = self._ctx.error if err: from warnings import warn", "flag: Blending BLEND = gl.GL_BLEND #: Context flag: Depth testing", "ctx.enable_only(ctx.BLEND) # Make sure only depth test and culling is", "index_buffer: Index/element buffer (optional) :param 
int mode: The default draw", "pyglet import gl from .buffer import Buffer from .program import", "gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", } def __init__(self, window: pyglet.window.Window, gc_mode:", "maximum number of individual 4-vectors of floating-point, integer, #: or", "of 4-component generic vertex attributes accessible to a vertex shader.", "Blend function SRC_COLOR = 0x0300 #: Blend function ONE_MINUS_SRC_COLOR =", "that can be held in uniform variable storage for a", "format multisample buffers self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum samples for", "Optional[\"Context\"] = None # --- Store the most commonly used", "def blend_func(self) -> Tuple[int, int]: \"\"\" Get or the blend", "uniform buffer binding points on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS)", "framebuffer self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS) #: An estimate of the number", "supporting the buffer protocol. \"\"\" return Texture(self, size, data=data, depth=True)", "3: RGB, 4: RGBA) :param str dtype: The data type", "Blend equations: source - destination FUNC_SUBTRACT = 0x800A #: Blend", ") -> Buffer: \"\"\"Create a new OpenGL Buffer object. :param", "Framebuffer. :param List[arcade.gl.Texture] color_attachments: List of textures we want to", "Primitive mode TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY # 13 #: Patch mode", "self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS) #: A mask value indicating what context", "We enable scissor testing by default. 
# This is always", "the occurring error or ``None`` of no errors has occurred.", "int]): self.active_framebuffer.viewport = value @property def blend_func(self) -> Tuple[int, int]:", "#: Maximum number of texels allowed in the texel array", "None ) source_tc = ( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if tess_control_shader else", "Normal garbage collection as default (what we expect in python)", "= 0x0301 #: Blend function SRC_ALPHA = 0x0302 #: Blend", "#: A rough estimate of the largest cube-map texture that", "import Geometry, VertexArray from .framebuffer import Framebuffer, DefaultFrameBuffer from typing", "Framebuffer: \"\"\" Get the currently active framebuffer. This property is", "rough estimate of the largest rectangular texture that the GL", "= %s] [freed = %s] [active = %s]\", key, self.warn_threshold,", "Blend function ONE_MINUS_DST_COLOR = 0x0307 # Blend equations #: source", "error Returns a string representation of the occurring error or", "13 #: Patch mode (tessellation) PATCHES = gl.GL_PATCHES # The", "%s] [active = %s]\", key, self.warn_threshold, created, freed, created -", "or an object supporting the buffer protocol. :param GLenum wrap_x:", "from typing import Any, Dict, List, Tuple, Union, Sequence, Set", "= 0x0300 #: Blend function ONE_MINUS_SRC_COLOR = 0x0301 #: Blend", "600 # 1080p ctx.viewport = 0, 0, 1920, 1080 #", "render to. Normally it would be the size of the", "#: Blend function ONE = 0x0001 #: Blend function SRC_COLOR", "normally accessed through ``window.ctx``. The Context class contains methods for", "CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER # Texture wrap mode: Repeat mirrored MIRRORED_REPEAT", "individual floating-point, integer, or boolean values that #: can be", "buffer data, This can be ``bytes`` or an object supporting", "vertex attributes accessible to a vertex shader. 
self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS)", ":param Tuple[int, int] size: The size of the texture :param", "ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader else None ) # If we", "value @property def blend_func(self) -> Tuple[int, int]: \"\"\" Get or", "self.active_program: Optional[Program] = None # Tracking active framebuffer. On context", "OpenGL API supported by the current context. self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION)", "object self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum number of uniform buffer", "self._blend_func = self.BLEND_DEFAULT self._point_size = 1.0 self._flags: Set[int] = set()", "self._gc_mode = value @property def error(self) -> Union[str, None]: \"\"\"Check", "flag) -> bool: \"\"\" Check if a context flag is", "can be used to access texture maps from the vertex", "binding points on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The", "self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The number 4-vectors for varying variables self.MAX_VARYING_VECTORS =", "filter for mipmaps NEAREST_MIPMAP_LINEAR = 0x2702 #: Texture interpolation: Minification", "value = c_int() gl.glGetIntegerv(enum, value) return value.value def get_float(self, enum)", "0x8006 #: Blend equations: source - destination FUNC_SUBTRACT = 0x800A", "freed = getattr(self, key) setattr(self, key, (created + 1, freed))", "= DefaultFrameBuffer(self) # Tracking active program self.active_program: Optional[Program] = None", "for mipmaps NEAREST_MIPMAP_NEAREST = 0x2700 #: Texture interpolation: Minification filter", "words for geometry shader uniform variables in all uniform blocks", "value: Tuple[int, int]): self._blend_func = value gl.glBlendFunc(value[0], value[1]) # def", "None, wrap_y: gl.GLenum = None, filter: Tuple[gl.GLenum, gl.GLenum] = None", "if fragment_shader else None ) source_geo = ( ShaderSource(geometry_shader, 
gl.GL_GEOMETRY_SHADER)", "freed + 1)) class Limits: \"\"\"OpenGL Limitations\"\"\" def __init__(self, ctx):", "an object supporting the buffer protocol. :param int reserve: The", "ctx.ONE :type: tuple (src, dst) \"\"\" return self._blend_func @blend_func.setter def", "the source (optional) :rtype: :py:class:`~arcade.gl.Program` \"\"\" source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER)", "profile is used (core, compat etc.) self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK) #:", "= self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the number of words for fragment shader", "def point_size(self) -> float: \"\"\"float: Get or set the point", "RG, 3: RGB, 4: RGBA) :param str dtype: The data", "by default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index = -1 self.primitive_restart_index = self._primitive_restart_index #", "= self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum number of uniform buffer binding points", "shader source :param str fragment_shader: fragment shader source (optional) :param", "function ONE_MINUS_SRC_COLOR = 0x0301 #: Blend function SRC_ALPHA = 0x0302", "self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform blocks per", "List of textures we want to render into :param arcade.gl.Texture", "List[str] if not source_fs: if source_geo: out_attributes = source_geo.out_attributes else:", "= self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #: Value indicates the maximum number", "= self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended maximum number of vertex array vertices", "= \"auto\"): self._window_ref = weakref.ref(window) self.limits = Limits(self) self._gl_version =", "Context flag: Enable ``gl_PointSize`` in shaders. PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE #", "can handle. The value must be at least 64 self.MAX_3D_TEXTURE_SIZE", "fragment shader we are doing transform feedback. 
# When a", "enums _errors = { gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\",", "a vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #: Maximum", "= None, geometry_shader: str = None, tess_control_shader: str = None,", "\"\"\" return self._gc_mode @gc_mode.setter def gc_mode(self, value: str): modes =", "dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y, filter=filter, ) def depth_texture(self, size: Tuple[int, int],", "#: Number of words for geometry shader uniform variables in", "of bytes reserve :param str usage: Buffer usage. 'static', 'dynamic'", "#: Maximum number of components of outputs written by a", "object supporting the buffer protocol. :param int reserve: The number", "Framebuffer = self._screen self.stats: ContextStats = ContextStats(warn_threshold=1000) # Hardcoded states", "return self.active_framebuffer.viewport @viewport.setter def viewport(self, value: Tuple[int, int, int, int]):", "image units that can be used to access texture maps", "fbo(self) -> Framebuffer: \"\"\" Get the currently active framebuffer. This", "-> Texture: \"\"\"Create a 2D Texture. Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``,", "#: Texture wrap mode: Repeat REPEAT = gl.GL_REPEAT # Texture", "or set number of vertices that will be used to", "as the currently active one\"\"\" cls.active = ctx def enable(self,", "point size.\"\"\" return self._point_size @point_size.setter def point_size(self, value: float): gl.glPointSize(self._point_size)", "None, reserve: int = 0, usage: str = \"static\" )", "self.program = (0, 0) self.vertex_array = (0, 0) self.geometry =", "self._flags = set(args) if self.BLEND in self._flags: gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND)", "values that #: can be held in uniform variable storage", "by the current context. 
self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION) self.VENDOR = self.get_str(gl.GL_VENDOR)", "gl.GL_GEOMETRY_SHADER) if geometry_shader else None ) source_tc = ( ShaderSource(tess_control_shader,", "mask value indicating what context profile is used (core, compat", ":param int index_element_size: Byte size of the index buffer type.", "0x0001, 0x0001 #: Blend mode shortcut for premultipled alpha: ``SRC_ALPHA,", "value) return value.value def get_float(self, enum) -> float: \"\"\"Get a", "gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #: maximum number of individual 4-vectors of floating-point,", "integer limit\"\"\" value = c_int() gl.glGetIntegerv(enum, value) return value.value def", "of source and destination MAX = 0x8008 # Blend mode", "self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self, *args): \"\"\" Disable one", "Tuple[int, int]): self._blend_func = value gl.glBlendFunc(value[0], value[1]) # def blend_equation(self)", "( ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if geometry_shader else None ) source_tc =", "str): modes = [\"auto\", \"context_gc\"] if value not in modes:", "floating-point, integer, #: or boolean values that can be held", ") #: Maximum number of components of outputs written by", "the vertex shader self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum number of", "@blend_func.setter def blend_func(self, value: Tuple[int, int]): self._blend_func = value gl.glBlendFunc(value[0],", "gl.GL_CULL_FACE #: Context flag: Enable ``gl_PointSize`` in shaders. PROGRAM_POINT_SIZE =", "multisample buffers self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum samples for a", "return self._window_ref() @property def screen(self) -> Framebuffer: \"\"\" The framebuffer", "we are doing transform feedback. # When a geometry shader", "API supported by the current context. 
self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION) self.VENDOR", "points on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum number", "return Framebuffer( self, color_attachments=color_attachments, depth_attachment=depth_attachment ) def texture( self, size:", "size: The size of the texture :param Any data: The", "source_fs = ( ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if fragment_shader else None )", "\"\"\" Represents an OpenGL context. This context belongs to a", "= 0x8007 #: Blend equations: Maximum of source and destination", "on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum size in", "storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum", "1)) class Limits: \"\"\"OpenGL Limitations\"\"\" def __init__(self, ctx): self._ctx =", "else: gl.glDisable(self.BLEND) if self.DEPTH_TEST in self._flags: gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST) if", "def finish(self) -> None: \"\"\"Wait until all OpenGL rendering commands", "#: Maximum number of color attachments in a framebuffer self.MAX_COLOR_ATTACHMENTS", "color multisample texture self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the number of", "self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended maximum number of vertex array", "gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform blocks per geometry", "List[Texture]] = None, depth_attachment: Texture = None ) -> Framebuffer:", "be used to access texture maps from the vertex shader.", "self._ctx.error if err: from warnings import warn warn(\"Error happened while", "float): gl.glPointSize(self._point_size) self._point_size = value @property def primitive_restart_index(self) -> int:", "be bytes or an object supporting the buffer protocol. 
:param", "written in a fragment shader self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum", "err = gl.glGetError() if err == gl.GL_NO_ERROR: return None return", "while querying of limits. Moving on ..\") def get(self, enum:", "self._flags.update(args) for flag in args: gl.glEnable(flag) def enable_only(self, *args): \"\"\"", "as a 2 component tuple :type: tuple (major, minor) version", "self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod def activate(cls, ctx: \"Context\"): \"\"\"Mark a context", "sections of your code base:: # Ensure all flags are", "a fragment shader self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum number of", ") #: A rough estimate of the largest cube-map texture", "source_vs.out_attributes return Program( self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if source_fs else None,", "Union, Sequence, Set import pyglet from pyglet.window import Window from", "= 0x0305 #: Blend function DST_COLOR = 0x0306 #: Blend", "depth test and culling is enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags", ":rtype: :py:class:`~arcade.gl.Buffer` \"\"\" # create_with_size return Buffer(self, data, reserve=reserve, usage=usage)", "components of inputs read by a geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS =", "ctx.viewport = 0, 0, 800, 600 # 1080p ctx.viewport =", "def texture( self, size: Tuple[int, int], *, components: int =", "\"\"\"Get an integer limit\"\"\" value = c_int() gl.glGetIntegerv(enum, value) return", "may be written in a fragment shader self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS)", "read by the fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS )", "number of 4-component generic vertex attributes accessible to a vertex", "-1 self.primitive_restart_index = self._primitive_restart_index # We enable scissor testing by", "given the vertex, fragment and geometry 
shader. :param str vertex_shader:", "front_face(self) # def cull_face(self) @property def patch_vertices(self) -> int: \"\"\"", "used to access texture maps from the geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS", "OpenGL API supported by the current context self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION)", "self.VENDOR = self.get_str(gl.GL_VENDOR) self.RENDERER = self.get_str(gl.GL_RENDERER) #: Value indicating the", "= self.get_str(gl.GL_VENDOR) self.RENDERER = self.get_str(gl.GL_RENDERER) #: Value indicating the number", "Blend function DST_COLOR = 0x0306 #: Blend function ONE_MINUS_DST_COLOR =", "Resource methods --- def buffer( self, *, data: Optional[Any] =", "we don't have a fragment shader we are doing transform", "interpolate LINEAR = 0x2601 #: Texture interpolation: Minification filter for", "vertex_shader: vertex shader source :param str fragment_shader: fragment shader source", "= self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum number of uniform blocks per fragment", "filter: Minification and magnification filter \"\"\" return Texture( self, size,", "= 0x0000 #: Blend function ONE = 0x0001 #: Blend", "current framebuffer size ctx.viewport = 0, 0, *ctx.screen.size :type: tuple", ") #: Number of words for geometry shader uniform variables", "sure only blending is enabled ctx.enable_only(ctx.BLEND) # Make sure only", "\"\"\" self._flags.update(args) for flag in args: gl.glEnable(flag) def enable_only(self, *args):", "@classmethod def activate(cls, ctx: \"Context\"): \"\"\"Mark a context as the", "outside the viewport gl.glEnable(gl.GL_SCISSOR_TEST) # States self._blend_func = self.BLEND_DEFAULT self._point_size", "else None ) source_te = ( ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader", "until all OpenGL rendering commands are completed\"\"\" gl.glFinish() # ---", "#: Primitive mode LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY # 11 #: Primitive", "# Texture #: Texture interpolation: 
Nearest pixel NEAREST = 0x2600", "import BufferDescription LOG = logging.getLogger(__name__) class Context: \"\"\" Represents an", "contains methods for creating resources, global states and commonly used", "so we need while len(self.objects): obj = self.objects.pop() obj.delete() @property", "'stream' :rtype: :py:class:`~arcade.gl.Buffer` \"\"\" # create_with_size return Buffer(self, data, reserve=reserve,", "block self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The number 4-vectors for varying", "DefaultFrameBuffer(self) # Tracking active program self.active_program: Optional[Program] = None #", "Maximum size in basic machine units of a uniform block", "of samples supported in integer format multisample buffers self.MAX_INTEGER_SAMPLES =", "str] = None ) -> Program: \"\"\"Create a :py:class:`~arcade.gl.Program` given", "program( self, *, vertex_shader: str, fragment_shader: str = None, geometry_shader:", "of sample buffers associated with the framebuffer self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS)", "bool \"\"\" return flag in self._flags @property def viewport(self) ->", "@property def viewport(self) -> Tuple[int, int, int, int]: \"\"\" Get", "if source_tc else None, tess_evaluation_shader=source_te.get_source(defines=defines) if source_te else None, out_attributes=out_attributes,", "number of the OpenGL API supported by the current context.", "gl.GL_TESS_CONTROL_SHADER) if tess_control_shader else None ) source_te = ( ShaderSource(tess_evaluation_shader,", "-> int: \"\"\"Get an integer limit\"\"\" value = c_int() gl.glGetIntegerv(enum,", "``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR`` :param Tuple[int, int]", "we expect in python) self._gc_mode = \"auto\" self.gc_mode = gc_mode", "equations: source - destination FUNC_SUBTRACT = 0x800A #: Blend equations:", "number of active draw buffers when using dual-source blending 
self.MAX_DUAL_SOURCE_DRAW_BUFFERS", "= value @property def error(self) -> Union[str, None]: \"\"\"Check OpenGL", "gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader else None ) # If we don't", "Tuple[int, int]: \"\"\" The OpenGL version as a 2 component", "size, components=components, data=data, dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y, filter=filter, ) def depth_texture(self,", "*args): \"\"\" Enables one or more context flags:: # Single", "-1\"\"\" return self._primitive_restart_index @primitive_restart_index.setter def primitive_restart_index(self, value: int): self._primitive_restart_index =", "variables in all uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS )", "a single patch primitive. Patch primitives are consumed by the", "Recommended maximum number of vertex array indices self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES)", "a fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum number of", "active: Optional[\"Context\"] = None # --- Store the most commonly", "Example:: err = ctx.error if err: raise RuntimeError(\"OpenGL error: {err}\")", "restart index. 
Default is -1\"\"\" return self._primitive_restart_index @primitive_restart_index.setter def primitive_restart_index(self,", "that can be used to access texture maps from the", "of vertices that will be used to make up a", "# 1 #: Primitive mode LINE_STRIP = gl.GL_LINE_STRIP # 3", "and destination MAX = 0x8008 # Blend mode shortcuts #:", "of samples in a color multisample texture self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES)", "floating-point, integer, or boolean values that #: can be held", "= c_float() gl.glGetFloatv(enum, value) return value.value def get_str(self, enum: gl.GLenum)", "else: gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE) def", "'dynamic' or 'stream' :rtype: :py:class:`~arcade.gl.Buffer` \"\"\" # create_with_size return Buffer(self,", "self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum number of components of the", "def blend_func(self, value: Tuple[int, int]): self._blend_func = value gl.glBlendFunc(value[0], value[1])", "methods for creating resources, global states and commonly used enums.", "-> Window: \"\"\" The window this context belongs to. :type:", "if not isinstance(value, int): raise TypeError(\"patch_vertices must be an integer\")", "a Geomtry instance. :param list content: List of :py:class:`~arcade.gl.BufferDescription` (optional)", "@property def error(self) -> Union[str, None]: \"\"\"Check OpenGL error Returns", "that the GL can handle self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum", "bytes reserve :param str usage: Buffer usage. 
'static', 'dynamic' or", "Can be bytes or an object supporting the buffer protocol.", "self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum samples for a framebuffer self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES)", "``GL_NEAREST``, ``GL_LINEAR`` :param Tuple[int, int] size: The size of the", "of simultaneous outputs that may be written in a fragment", "4-vectors that may be held in uniform variable storage for", "self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #: maximum number of individual 4-vectors of", "self._primitive_restart_index = value gl.glPrimitiveRestartIndex(value) def finish(self) -> None: \"\"\"Wait until", "def disable(self, *args): \"\"\" Disable one or more context flags::", "= None, index_element_size: int = 4, ): \"\"\" Create a", "On context creation the window is the default render target", "``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT = 0x0302, 0x0303 #: Blend mode shortcut", "components (1: R, 2: RG, 3: RGB, 4: RGBA) :param", "self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #: Maximum number of 4-vectors", "= ( ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if geometry_shader else None ) source_tc", "Texture. Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying filters: ``GL_NEAREST``,", "active framebuffer. On context creation the window is the default", "new OpenGL Buffer object. :param Any data: The buffer data,", "occurred. Example:: err = ctx.error if err: raise RuntimeError(\"OpenGL error:", "OpenGL context. This context belongs to a ``pyglet.Window`` normally accessed", "Framebuffer: \"\"\" The framebuffer for the window. :type: :py:class:`~arcade.Framebuffer` \"\"\"", "depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum number of simultaneous", "context. 
self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION) self.VENDOR = self.get_str(gl.GL_VENDOR) self.RENDERER = self.get_str(gl.GL_RENDERER)", "blocks per program self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number of words", "Maximum number of 4-component generic vertex attributes accessible to a", "must be at least 64 self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum", "of uniform blocks per program self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number", "if geometry_shader else None ) source_tc = ( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER)", "gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self,", "Maximum of source and destination MAX = 0x8008 # Blend", "# 13 #: Patch mode (tessellation) PATCHES = gl.GL_PATCHES #", "# 4:3 screen ctx.viewport = 0, 0, 800, 600 #", "active program self.active_program: Optional[Program] = None # Tracking active framebuffer.", "def error(self) -> Union[str, None]: \"\"\"Check OpenGL error Returns a", "enums. All enums also exist in the ``gl`` module. 
(``ctx.BLEND``", ":param str tess_control_shader: tessellation control shader source (optional) :param str", "None, wrap_x: gl.GLenum = None, wrap_y: gl.GLenum = None, filter:", "shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #: Maximum supported texture", "gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum supported texture image units that can", ") def decr(self, key): created, freed = getattr(self, key) setattr(self,", "\"\"\" return self._blend_func @blend_func.setter def blend_func(self, value: Tuple[int, int]): self._blend_func", "Single flag ctx.enable(ctx.BLEND) # Multiple flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags.update(args)", "gc_mode: str = \"auto\"): self._window_ref = weakref.ref(window) self.limits = Limits(self)", "= self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) #", "# Using the current framebuffer size ctx.viewport = 0, 0,", "flags:: # Single flag ctx.disable(ctx.BLEND) # Multiple flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE)", "sample buffers associated with the framebuffer self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS) #:", "object might add new ones so we need while len(self.objects):", "= self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum required alignment for uniform buffer sizes", "interpolation: Minification filter for mipmaps NEAREST_MIPMAP_NEAREST = 0x2700 #: Texture", "self.stats: ContextStats = ContextStats(warn_threshold=1000) # Hardcoded states # This should", "one\"\"\" cls.active = ctx def enable(self, *args): \"\"\" Enables one", "# If we don't have a fragment shader we are", "shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #: Maximum number of", "allowed in the texel array of a texture buffer object", "4-vectors of floating-point, integer, #: or 
boolean values that can", "per vertex shader. self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET)", "import gl from .buffer import Buffer from .program import Program", "Enables one or more context flags:: # Single flag ctx.enable(ctx.BLEND)", "indicating the number of sample buffers associated with the framebuffer", "str: \"\"\"Get a string limit\"\"\" return cast(gl.glGetString(enum), c_char_p).value.decode() # type:", "rendering commands are completed\"\"\" gl.glFinish() # --- Resource methods ---", "as default (what we expect in python) self._gc_mode = \"auto\"", "tess_evaluation_shader else None ) # If we don't have a", "blending: ``ONE, ONE`` BLEND_ADDITIVE = 0x0001, 0x0001 #: Blend mode", "\"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", } def __init__(self,", "flag ctx.enable(ctx.BLEND) # Multiple flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags.update(args) for", "-> str: \"\"\"Get a string limit\"\"\" return cast(gl.glGetString(enum), c_char_p).value.decode() #", "str usage: Buffer usage. 
'static', 'dynamic' or 'stream' :rtype: :py:class:`~arcade.gl.Buffer`", "\"\"\"Get a string limit\"\"\" return cast(gl.glGetString(enum), c_char_p).value.decode() # type: ignore", "one object might add new ones so we need while", "#: Context flag: Blending BLEND = gl.GL_BLEND #: Context flag:", "blend_func(self, value: Tuple[int, int]): self._blend_func = value gl.glBlendFunc(value[0], value[1]) #", "written by a geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS )", "created, freed = getattr(self, key) setattr(self, key, (created + 1,", "Tuple[gl.GLenum, gl.GLenum] = None ) -> Texture: \"\"\"Create a 2D", "equations #: source + destination FUNC_ADD = 0x8006 #: Blend", "Framebuffer, DefaultFrameBuffer from typing import Optional from .texture import Texture", "a multisample depth or depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #:", "supported by the current context. self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION) self.VENDOR =", "from .program import Program from .vertex_array import Geometry, VertexArray from", "mode: The default draw mode (optional) :param int mode: The", "} def __init__(self, window: pyglet.window.Window, gc_mode: str = \"auto\"): self._window_ref", "in shaders. PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE # Blend functions #: Blend", "self._point_size = value @property def primitive_restart_index(self) -> int: \"\"\"Get or", "handle self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum number of samples in", "int components: Number of components (1: R, 2: RG, 3:", "the window. 
:type: :py:class:`~arcade.Framebuffer` \"\"\" return self._screen @property def fbo(self)", "context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum size in basic machine", "of components of inputs read by a geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS", "= [] # type: List[str] if not source_fs: if source_geo:", "``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001 # VertexArray: Primitives #:", "for premultipled alpha: ``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001 #", "fragment_shader=source_fs.get_source(defines=defines) if source_fs else None, geometry_shader=source_geo.get_source(defines=defines) if source_geo else None,", "on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The value gives", "shader uniform variables in all uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get(", "Minimum required alignment for uniform buffer sizes and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT", "ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if geometry_shader else None ) source_tc = (", "\"auto\"): self._window_ref = weakref.ref(window) self.limits = Limits(self) self._gl_version = (self.limits.MAJOR_VERSION,", "accessible to a vertex shader. 
self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum", "the largest rectangular texture that the GL can handle self.MAX_RECTANGLE_TEXTURE_SIZE", "for vertex shader uniform variables in all uniform blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS", "blend_func(self) -> Tuple[int, int]: \"\"\" Get or the blend function::", "mode LINE_STRIP = gl.GL_LINE_STRIP # 3 #: Primitive mode TRIANGLES", "gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", } def", "0x0303 #: Blend function DST_ALPHA = 0x0304 #: Blend function", "Context flag: Face culling CULL_FACE = gl.GL_CULL_FACE #: Context flag:", "#: Primitive mode POINTS = gl.GL_POINTS # 0 #: Primitive", "RGB, 4: RGBA) :param str dtype: The data type of", "# def blend_equation(self) # def front_face(self) # def cull_face(self) @property", "mode LINES = gl.GL_LINES # 1 #: Primitive mode LINE_STRIP", "-> Buffer: \"\"\"Create a new OpenGL Buffer object. :param Any", "shader self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum number of active draw", "variable storage for the vertex shader self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #:", "%s]\", key, self.warn_threshold, created, freed, created - freed, ) def", "version number of the OpenGL API supported by the current", "shader uniform variables in all uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get(", "shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #: Maximum number of", "\"\"\"Get a float limit\"\"\" value = c_float() gl.glGetFloatv(enum, value) return", "all objects are gone. # Deleting one object might add", "= self._primitive_restart_index # We enable scissor testing by default. 
#", "number of bits of subpixel resolution #: that are used", "used OpenGL constants # Texture #: Texture interpolation: Nearest pixel", "window is the default render target self.active_framebuffer: Framebuffer = self._screen", "Hardcoded states # This should always be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) #", "0x2701 #: Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_LINEAR =", "set the viewport for the currently active framebuffer. The viewport", "self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum number of samples in a", "= \"auto\" self.gc_mode = gc_mode #: Collected objects to gc", "array texture, and must be at least 256 self.MAX_ARRAY_TEXTURE_LAYERS =", "basic machine units of a uniform block self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE)", "LINEAR_MIPMAP_LINEAR = 0x2703 #: Texture wrap mode: Repeat REPEAT =", ":py:class:`~arcade.gl.Query` \"\"\" return Query(self) class ContextStats: def __init__(self, warn_threshold=100): self.warn_threshold", "shaders. 
PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE # Blend functions #: Blend function", "reserve: int = 0, usage: str = \"static\" ) ->", "defines: Substitute #defines values in the source (optional) :rtype: :py:class:`~arcade.gl.Program`", "storage for a vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS )", "return value.value def get_float(self, enum) -> float: \"\"\"Get a float", "\"\"\" err = gl.glGetError() if err == gl.GL_NO_ERROR: return None", "samples in a color multisample texture self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #:", "blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum supported texture", "= gl.GL_REPEAT # Texture wrap mode: Clamp to border pixel", "Blend function ONE = 0x0001 #: Blend function SRC_COLOR =", "#: Primitive mode LINES = gl.GL_LINES # 1 #: Primitive", "Optional[Sequence[BufferDescription]] = None, index_buffer: Buffer = None, mode: int =", "of words for vertex shader uniform variables in all uniform", "the fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #: Maximum", "ctx.CULL_FACE) \"\"\" self._flags = set(args) if self.BLEND in self._flags: gl.glEnable(self.BLEND)", "self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum number of components of inputs", "mode LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY # 11 #: Primitive mode TRIANGLES_ADJACENCY", "self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the number of words for fragment", "mode TRIANGLES = gl.GL_TRIANGLES # 4 #: Primitive mode TRIANGLE_STRIP", "c_float() gl.glGetFloatv(enum, value) return value.value def get_str(self, enum: gl.GLenum) ->", "return self._blend_func @blend_func.setter def blend_func(self, value: Tuple[int, int]): self._blend_func =", "if self.CULL_FACE in self._flags: 
gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE in", "self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum number of simultaneous outputs that", "*args): \"\"\" Disable one or more context flags:: # Single", "boolean values that #: can be held in uniform variable", "0x0301 #: Blend function SRC_ALPHA = 0x0302 #: Blend function", "errors has occurred. Example:: err = ctx.error if err: raise", "garbage collection mode for OpenGL resources. Supported modes are: #", "Texture wrap mode: Repeat REPEAT = gl.GL_REPEAT # Texture wrap", "other sections of your code base:: # Ensure all flags", "#: Maximum number of active draw buffers when using dual-source", "point_size(self) -> float: \"\"\"float: Get or set the point size.\"\"\"", "-> Tuple[int, int]: \"\"\" Get or the blend function:: ctx.blend_func", "Union[Texture, List[Texture]] = None, depth_attachment: Texture = None ) ->", "used to position rasterized geometry in window coordinates self.SUBPIXEL_BITS =", "wrap_x=wrap_x, wrap_y=wrap_y, filter=filter, ) def depth_texture(self, size: Tuple[int, int], *,", "of inputs read by a geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get(", "\"\"\"Create a :py:class:`~arcade.gl.Program` given the vertex, fragment and geometry shader.", "ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if tess_control_shader else None ) source_te = (", "etc.) 
self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum required alignment for uniform", "0, 1920, 1080 # Using the current framebuffer size ctx.viewport", "of individual floating-point, integer, or boolean values that can #:", "shader uniform variables in all uniform blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get(", "of the OpenGL API supported by the current context self.MINOR_VERSION", "Maximum number of uniform buffer binding points on the context", "LINES_ADJACENCY = gl.GL_LINES_ADJACENCY # 10 #: Primitive mode LINE_STRIP_ADJACENCY =", "Make sure only blending is enabled ctx.enable_only(ctx.BLEND) # Make sure", "in uniform variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS =", "make up a single patch primitive. Patch primitives are consumed", "float: \"\"\"Get a float limit\"\"\" value = c_float() gl.glGetFloatv(enum, value)", "a string representation of the occurring error or ``None`` of", "= Limits(self) self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self) # Texture unit", "estimate of the largest cube-map texture that the GL can", "= c_int() gl.glGetIntegerv(enum, value) return value.value def get_float(self, enum) ->", "tess_evaluation_shader: str = None, defines: Dict[str, str] = None )", "be an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property def point_size(self) -> float:", "to. 
Normally it would be the size of the window's", "individual floating-point, integer, or boolean values that can #: be", ":param Buffer index_buffer: Index/element buffer (optional) :param int mode: The", "access texture maps from the vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get(", "weakref from typing import Any, Dict, List, Tuple, Union, Sequence,", "Texture wrap mode: Clamp to border color CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER", "of output written by a vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS)", "# The most common error enums _errors = { gl.GL_INVALID_ENUM:", "by default. # This is always set to the same", "from the vertex shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #:", "u2, u4 :param Any data: The texture data (optional). Can", "freed) self.texture = (0, 0) self.framebuffer = (0, 0) self.buffer", "0) self.program = (0, 0) self.vertex_array = (0, 0) self.geometry", "# 1080p ctx.viewport = 0, 0, 1920, 1080 # Using", "and must be at least 256 self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #:", "always be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set primitive restart index to", "can be used to access texture maps from the geometry", "width, height) \"\"\" return self.active_framebuffer.viewport @viewport.setter def viewport(self, value: Tuple[int,", "getattr(self, key) setattr(self, key, (created + 1, freed)) if created", "python) self._gc_mode = \"auto\" self.gc_mode = gc_mode #: Collected objects", "u1, u2, u4 :param Any data: The texture data (optional).", "number of components of inputs read by a geometry shader", "used for tessellation. 
:type: int \"\"\" value = c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES,", "self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum number of samples in a", "shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum number of", "= 4, ): \"\"\" Create a Geomtry instance. :param list", "source (optional) :param str tess_control_shader: tessellation control shader source (optional)", "If we don't have a fragment shader we are doing", "framebuffer. This property is read-only :type: :py:class:`arcade.gl.Framebuffer` \"\"\" return self.active_framebuffer", "(1: R, 2: RG, 3: RGB, 4: RGBA) :param str", "buffer protocol. \"\"\" return Texture(self, size, data=data, depth=True) def geometry(", "Optional from .texture import Texture from .query import Query from", "= gl.GL_CLAMP_TO_BORDER # Texture wrap mode: Repeat mirrored MIRRORED_REPEAT =", "self.vertex_array = (0, 0) self.geometry = (0, 0) def incr(self,", "Can be 1, 2 or 4 (8, 16 or 32", "import weakref from typing import Any, Dict, List, Tuple, Union,", "integer, #: or boolean values that can be held in", "array of a texture buffer object self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #:", "Maximum number of components of output written by a vertex", "color_attachments: List of textures we want to render into :param", "#: Primitive mode LINE_STRIP = gl.GL_LINE_STRIP # 3 #: Primitive", "else: gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE in self._flags: gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE) if", "Blend function DST_ALPHA = 0x0304 #: Blend function ONE_MINUS_DST_ALPHA =", "mode: int = None, index_element_size: int = 4, ): \"\"\"", "-> float: \"\"\"Get a float limit\"\"\" value = c_float() gl.glGetFloatv(enum,", "one or more context flags:: # Single flag ctx.enable(ctx.BLEND) #", "def patch_vertices(self, value: int): if not isinstance(value, int): raise TypeError(\"patch_vertices", "viewport 
gl.glEnable(gl.GL_SCISSOR_TEST) # States self._blend_func = self.BLEND_DEFAULT self._point_size = 1.0", "return value.value def get_str(self, enum: gl.GLenum) -> str: \"\"\"Get a", "is read-only :type: :py:class:`arcade.gl.Framebuffer` \"\"\" return self.active_framebuffer @property def gl_version(self)", "handle self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum number of uniform buffer", "\"\"\"Create a 2D Texture. Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER``", "is always set to the same value as the viewport", "Maximum number of active draw buffers when using dual-source blending", "self._blend_func = value gl.glBlendFunc(value[0], value[1]) # def blend_equation(self) # def", "number 4-vectors for varying variables self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum", "primitive_restart_index(self, value: int): self._primitive_restart_index = value gl.glPrimitiveRestartIndex(value) def finish(self) ->", "str vertex_shader: vertex shader source :param str fragment_shader: fragment shader", "int \"\"\" value = c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return value.value @patch_vertices.setter", "None ) -> Texture: \"\"\"Create a 2D Texture. Wrap modes:", "defines: Dict[str, str] = None ) -> Program: \"\"\"Create a", "for the vertex shader self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum number", "self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum size in basic machine units", "states and commonly used enums. 
All enums also exist in", "(0, 0) self.buffer = (0, 0) self.program = (0, 0)", "self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum supported size for renderbuffers self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE)", "units that can be used to access texture maps from", "common error enums _errors = { gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\",", ") #: maximum number of individual 4-vectors of floating-point, integer,", "= self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #: A rough estimate of the", "shader self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum number of individual floating-point,", "coordinates self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS) #: A mask value indicating what", "active context active: Optional[\"Context\"] = None # --- Store the", "size ctx.viewport = 0, 0, *ctx.screen.size :type: tuple (x, y,", "# This is always set to the same value as", "self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO: Missing in pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY", "geometry in window coordinates self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS) #: A mask", "least 64 self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum number of color", "simple way to ensure that context flag states are not", "self, size: Tuple[int, int], *, components: int = 4, dtype:", ") #: Value indicates the maximum number of layers allowed", "else: out_attributes = source_vs.out_attributes return Program( self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if", "target self.active_framebuffer: Framebuffer = self._screen self.stats: ContextStats = ContextStats(warn_threshold=1000) #", "Recommended maximum number of vertex array vertices self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES)", "= ( ShaderSource(tess_evaluation_shader, 
gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader else None ) #", "Clamp to border pixel CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE # Texture wrap", "\"\"\" Get or set number of vertices that will be", "more context flags:: # Single flag ctx.disable(ctx.BLEND) # Multiple flags", "c_int, c_char_p, cast, c_float from collections import deque import logging", "Maximum number of uniform blocks per fragment shader. self.MAX_FRAGMENT_UNIFORM_BLOCKS =", "gl.glDisable(self.BLEND) if self.DEPTH_TEST in self._flags: gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE", "inputs read by the fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS", "maps from the vertex shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS )", "Nearest pixel NEAREST = 0x2600 #: Texture interpolation: Linear interpolate", "This can be ``bytes`` or an object supporting the buffer", "self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A rough estimate of the largest 3D texture", "for this context. This is only needed when ``gc_mode`` is", "context as the currently active one\"\"\" cls.active = ctx def", "the vertex shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #: Maximum", "--- Store the most commonly used OpenGL constants # Texture", "shader. 
self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum supported texture image units", "- 1 # Detect the default framebuffer self._screen = DefaultFrameBuffer(self)", "gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property def point_size(self) -> float: \"\"\"float: Get or", "def fbo(self) -> Framebuffer: \"\"\" Get the currently active framebuffer.", "size.\"\"\" return self._point_size @point_size.setter def point_size(self, value: float): gl.glPointSize(self._point_size) self._point_size", "Optional[Any] = None, reserve: int = 0, usage: str =", "all uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #: Number", "be used to access texture maps from the vertex shader", "destination FUNC_SUBTRACT = 0x800A #: Blend equations: destination - source", "1 #: Primitive mode LINE_STRIP = gl.GL_LINE_STRIP # 3 #:", "primitive_restart_index(self) -> int: \"\"\"Get or set the primitive restart index.", "destination MAX = 0x8008 # Blend mode shortcuts #: Blend", "\"\"\" return Texture( self, size, components=components, data=data, dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y,", "can be #: held in uniform variable storage for a", "\"\"\" The framebuffer for the window. :type: :py:class:`~arcade.Framebuffer` \"\"\" return", "of a uniform block self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The number", "an object supporting the buffer protocol. :param GLenum wrap_x: How", "uniform variable storage for a geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get(", "data=data, depth=True) def geometry( self, content: Optional[Sequence[BufferDescription]] = None, index_buffer:", "of the screen OpenGL should render to. 
Normally it would", "self.primitive_restart_index = self._primitive_restart_index # We enable scissor testing by default.", "Get or set the point size.\"\"\" return self._point_size @point_size.setter def", "= gl.GL_TRIANGLE_STRIP # 5 #: Primitive mode TRIANGLE_FAN = gl.GL_TRIANGLE_FAN", "calls in opengl. :rtype: :py:class:`~arcade.gl.Query` \"\"\" return Query(self) class ContextStats:", "index buffer type. Can be 1, 2 or 4 (8,", "TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY # 12 #: Primitive mode TRIANGLE_STRIP_ADJACENCY =", "geometry_shader else None ) source_tc = ( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if", "flags) ctx.enable_only() # Make sure only blending is enabled ctx.enable_only(ctx.BLEND)", "in uniform variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS =", "gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index = -1 self.primitive_restart_index = self._primitive_restart_index # We enable", "gl.glGetIntegerv(enum, value) return value.value def get_float(self, enum) -> float: \"\"\"Get", "A rough estimate of the largest 3D texture that the", "def activate(cls, ctx: \"Context\"): \"\"\"Mark a context as the currently", "value.value @patch_vertices.setter def patch_vertices(self, value: int): if not isinstance(value, int):", "expect in python) self._gc_mode = \"auto\" self.gc_mode = gc_mode #:", "that will be used to make up a single patch", "Blend equations: destination - source FUNC_REVERSE_SUBTRACT = 0x800B #: Blend", "on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum number of", "= gl.GL_PROGRAM_POINT_SIZE # Blend functions #: Blend function ZERO =", "This is a simple way to ensure that context flag", "rough estimate of the largest cube-map texture that the GL", "[created = %s] [freed = %s] [active = %s]\", key,", "gl.GL_TRIANGLE_STRIP_ADJACENCY # 13 #: Patch mode (tessellation) PATCHES = gl.GL_PATCHES", "interpolation: Minification 
filter for mipmaps LINEAR_MIPMAP_NEAREST = 0x2701 #: Texture", "None, tess_evaluation_shader=source_te.get_source(defines=defines) if source_te else None, out_attributes=out_attributes, ) def query(self):", "the currently active framebuffer. This property is read-only :type: :py:class:`arcade.gl.Framebuffer`", "buffer protocol. :param GLenum wrap_x: How the texture wraps in", "be bytes or an object supporting the buffer protocol. \"\"\"", "self.RENDERER = self.get_str(gl.GL_RENDERER) #: Value indicating the number of sample", "= None, mode: int = None, index_element_size: int = 4,", "an OpenGL context. This context belongs to a ``pyglet.Window`` normally", "describes what pixels of the screen OpenGL should render to.", "Limits(self) self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self) # Texture unit we", "be held in uniform variable storage for the vertex shader", "held in uniform variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS", "self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended maximum number of vertex array vertices self.MAX_ELEMENTS_VERTICES", "self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO: Missing in pyglet #", "window this context belongs to. :type: ``pyglet.Window`` \"\"\" return self._window_ref()", "self._flags -= set(args) for flag in args: gl.glDisable(flag) def is_enabled(self,", "of individual floating-point, integer, or boolean values that can be", "buffer protocol. :param int reserve: The number of bytes reserve", "self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum number of uniform buffer binding points on", "uniform blocks per vertex shader. 
self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET", ":type: tuple (x, y, width, height) \"\"\" return self.active_framebuffer.viewport @viewport.setter", "List[arcade.gl.Texture] color_attachments: List of textures we want to render into", "of active draw buffers when using dual-source blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS =", "outputs that may be written in a fragment shader self.MAX_DRAW_BUFFERS", "Texture from .query import Query from .glsl import ShaderSource from", "self.get(gl.GL_SUBPIXEL_BITS) #: A mask value indicating what context profile is", "= (0, 0) self.vertex_array = (0, 0) self.geometry = (0,", "self.warn_threshold, created, freed, created - freed, ) def decr(self, key):", "self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #: maximum number of individual", "self, color_attachments=color_attachments, depth_attachment=depth_attachment ) def texture( self, size: Tuple[int, int],", "= None, index_buffer: Buffer = None, mode: int = None,", "# 0 #: Primitive mode LINES = gl.GL_LINES # 1", "largest 3D texture that the GL can handle. The value", "gc_mode(self, value: str): modes = [\"auto\", \"context_gc\"] if value not", "wraps in x direction :param GLenum wrap_y: How the texture", "measuring rendering calls in opengl. :rtype: :py:class:`~arcade.gl.Query` \"\"\" return Query(self)", "number of individual floating-point, integer, or boolean values that can", "= gl.GL_LINES # 1 #: Primitive mode LINE_STRIP = gl.GL_LINE_STRIP", "to access texture maps from the vertex shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS =", "for a framebuffer self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES) #: A rough estimate", "int mode: The default draw mode (optional) :param int mode:", "mode for OpenGL resources. 
Supported modes are: # default: Auto", "for additive blending: ``ONE, ONE`` BLEND_ADDITIVE = 0x0001, 0x0001 #:", "string representation of the occurring error or ``None`` of no", "the texture wraps in y direction :param Tuple[GLenum,GLenum] filter: Minification", "color affecting areas outside the viewport gl.glEnable(gl.GL_SCISSOR_TEST) # States self._blend_func", "str \"\"\" err = gl.glGetError() if err == gl.GL_NO_ERROR: return", "# Single flag ctx.enable(ctx.BLEND) # Multiple flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\"", "when ``gc_mode`` is ``context_gc``. \"\"\" # Loop the array until", "gl.glEnable(gl.GL_SCISSOR_TEST) # States self._blend_func = self.BLEND_DEFAULT self._point_size = 1.0 self._flags:", "= warn_threshold # (created, freed) self.texture = (0, 0) self.framebuffer", "def is_enabled(self, flag) -> bool: \"\"\" Check if a context", "or ``None`` of no errors has occurred. Example:: err =", "# 4 #: Primitive mode TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP # 5", "varying variables self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum number of 4-component", "shader. self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS", "self.get(gl.GL_SAMPLE_BUFFERS) #: An estimate of the number of bits of", "source (optional) :rtype: :py:class:`~arcade.gl.Program` \"\"\" source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs", "vertex shader. 
self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) #", "and culling is enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags = set(args)", "Texture unit we use when doing operations on textures to", "function ZERO = 0x0000 #: Blend function ONE = 0x0001", "we need while len(self.objects): obj = self.objects.pop() obj.delete() @property def", "def screen(self) -> Framebuffer: \"\"\" The framebuffer for the window.", "return value.value @patch_vertices.setter def patch_vertices(self, value: int): if not isinstance(value,", "depth_attachment: Texture = None ) -> Framebuffer: \"\"\"Create a Framebuffer.", "are used to position rasterized geometry in window coordinates self.SUBPIXEL_BITS", "uniform variables in all uniform blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS", "tuple (x, y, width, height) \"\"\" return self.active_framebuffer.viewport @viewport.setter def", "active framebuffer. This property is read-only :type: :py:class:`arcade.gl.Framebuffer` \"\"\" return", "(x, y, width, height) \"\"\" return self.active_framebuffer.viewport @viewport.setter def viewport(self,", "of the texture :param int components: Number of components (1:", "(enable no flags) ctx.enable_only() # Make sure only blending is", "by a geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #:", "def __init__(self, warn_threshold=100): self.warn_threshold = warn_threshold # (created, freed) self.texture", "an object supporting the buffer protocol. 
\"\"\" return Texture(self, size,", "code base:: # Ensure all flags are disabled (enable no", "handle self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum supported size for renderbuffers", "int: \"\"\"Get an integer limit\"\"\" value = c_int() gl.glGetIntegerv(enum, value)", "usage: Buffer usage. 'static', 'dynamic' or 'stream' :rtype: :py:class:`~arcade.gl.Buffer` \"\"\"", "the point size.\"\"\" return self._point_size @point_size.setter def point_size(self, value: float):", ":py:class:`~arcade.Framebuffer` \"\"\" return self._screen @property def fbo(self) -> Framebuffer: \"\"\"", "str fragment_shader: fragment shader source (optional) :param str geometry_shader: geometry", "self.PROGRAM_POINT_SIZE in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self, *args): \"\"\"", "Limits: \"\"\"OpenGL Limitations\"\"\" def __init__(self, ctx): self._ctx = ctx #:", ") def depth_texture(self, size: Tuple[int, int], *, data=None) -> Texture:", "\"\"\" Enable only some flags. This will disable all other", "= (0, 0) self.program = (0, 0) self.vertex_array = (0,", "self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A rough estimate of the largest", "0, 800, 600 # 1080p ctx.viewport = 0, 0, 1920,", "SRC_COLOR = 0x0300 #: Blend function ONE_MINUS_SRC_COLOR = 0x0301 #:", "return self._gc_mode @gc_mode.setter def gc_mode(self, value: str): modes = [\"auto\",", "self._primitive_restart_index # We enable scissor testing by default. # This", ":param str usage: Buffer usage. 
'static', 'dynamic' or 'stream' :rtype:", "sample mask words self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum number of", "will be used to make up a single patch primitive.", "self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum number of active draw buffers", "str = None, tess_control_shader: str = None, tess_evaluation_shader: str =", "for flag in args: gl.glDisable(flag) def is_enabled(self, flag) -> bool:", "by a geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #:", "# Blend functions #: Blend function ZERO = 0x0000 #:", "0x0302 #: Blend function ONE_MINUS_SRC_ALPHA = 0x0303 #: Blend function", "= None # Tracking active framebuffer. On context creation the", "viewport(self) -> Tuple[int, int, int, int]: \"\"\" Get or set", "None ) source_te = ( ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader else", "SRC_ALPHA = 0x0302 #: Blend function ONE_MINUS_SRC_ALPHA = 0x0303 #:", "import Program from .vertex_array import Geometry, VertexArray from .framebuffer import", "``arcade.gl.BLEND``). \"\"\" #: The active context active: Optional[\"Context\"] = None", "\"\"\" Create a query object for measuring rendering calls in", ".vertex_array import Geometry, VertexArray from .framebuffer import Framebuffer, DefaultFrameBuffer from", "The value gives a rough estimate of the largest texture", "OpenGL error Returns a string representation of the occurring error", "set(args) if self.BLEND in self._flags: gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND) if self.DEPTH_TEST", "{err}\") :type: str \"\"\" err = gl.glGetError() if err ==", "of the inputs read by the fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS =", "on ..\") def get(self, enum: gl.GLenum) -> int: \"\"\"Get an", "default. 
# This is always set to the same value", "= set(args) if self.BLEND in self._flags: gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND) if", "when doing operations on textures to avoid # affecting currently", "context profile is used (core, compat etc.) self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK)", "key) setattr(self, key, (created + 1, freed)) if created %", "(created + 1, freed)) if created % self.warn_threshold == 0", "a context as the currently active one\"\"\" cls.active = ctx", "from .vertex_array import Geometry, VertexArray from .framebuffer import Framebuffer, DefaultFrameBuffer", "\"\"\" The OpenGL version as a 2 component tuple :type:", "that the GL can handle self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum", "MAX = 0x8008 # Blend mode shortcuts #: Blend mode", "@property def patch_vertices(self) -> int: \"\"\" Get or set number", "from collections import deque import logging import weakref from typing", "type. Can be 1, 2 or 4 (8, 16 or", "filter for mipmaps LINEAR_MIPMAP_NEAREST = 0x2701 #: Texture interpolation: Minification", "None, tess_evaluation_shader: str = None, defines: Dict[str, str] = None", "floating-point, integer, or boolean values that can #: be held", "= self.limits.MAX_TEXTURE_IMAGE_UNITS - 1 # Detect the default framebuffer self._screen", "mode shortcut for default blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT =", "return None return self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod def activate(cls, ctx: \"Context\"):", "None ) -> Program: \"\"\"Create a :py:class:`~arcade.gl.Program` given the vertex,", "Buffer from .program import Program from .vertex_array import Geometry, VertexArray", "Tuple[int, int], *, data=None) -> Texture: \"\"\"Create a 2D depth", "= self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum number of active draw buffers when", "created % self.warn_threshold == 0 and created > 0: LOG.debug(", "all uniform blocks 
self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #: A", "ctx.CULL_FACE) \"\"\" self._flags.update(args) for flag in args: gl.glEnable(flag) def enable_only(self,", ":type: tuple (src, dst) \"\"\" return self._blend_func @blend_func.setter def blend_func(self,", "\"\"\" return self.active_framebuffer.viewport @viewport.setter def viewport(self, value: Tuple[int, int, int,", "or boolean values that can be #: held in uniform", "# Texture wrap mode: Clamp to border color CLAMP_TO_BORDER =", "= gl.GL_LINES_ADJACENCY # 10 #: Primitive mode LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY", "\"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW:", "import logging import weakref from typing import Any, Dict, List,", "gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #: Maximum number of components of outputs written", "#: Maximum number of uniform blocks per geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS", "the blend function:: ctx.blend_func = ctx.ONE, ctx.ONE :type: tuple (src,", "(0, 0) def incr(self, key): created, freed = getattr(self, key)", "flag ctx.disable(ctx.BLEND) # Multiple flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags -=", "filter for mipmaps NEAREST_MIPMAP_NEAREST = 0x2700 #: Texture interpolation: Minification", "obj.delete() @property def gc_mode(self) -> str: \"\"\" Set the garbage", "data type of each component: f1, f2, f4 / i1,", "(optional) :param int index_element_size: Byte size of the index buffer", "= gl.GL_TRIANGLE_STRIP_ADJACENCY # 13 #: Patch mode (tessellation) PATCHES =", "This will disable all other flags. 
This is a simple", "import Window from pyglet import gl from .buffer import Buffer", "self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #: Value indicates the maximum", "buffer (optional) :param int mode: The default draw mode (optional)", "modes = [\"auto\", \"context_gc\"] if value not in modes: raise", "#: Blend function ONE_MINUS_DST_ALPHA = 0x0305 #: Blend function DST_COLOR", "enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set primitive restart index to -1 by", "3D texture that the GL can handle. The value must", "Value indicates the maximum number of layers allowed in an", "def get(self, enum: gl.GLenum) -> int: \"\"\"Get an integer limit\"\"\"", "of :py:class:`~arcade.gl.BufferDescription` (optional) :param Buffer index_buffer: Index/element buffer (optional) :param", "is -1\"\"\" return self._primitive_restart_index @primitive_restart_index.setter def primitive_restart_index(self, value: int): self._primitive_restart_index", "else None, out_attributes=out_attributes, ) def query(self): \"\"\" Create a query", "in a framebuffer self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum number of", "avoid background color affecting areas outside the viewport gl.glEnable(gl.GL_SCISSOR_TEST) #", "value = c_float() gl.glGetFloatv(enum, value) return value.value def get_str(self, enum:", "def blend_equation(self) # def front_face(self) # def cull_face(self) @property def", "Optional[Program] = None # Tracking active framebuffer. On context creation", "the OpenGL API supported by the current context self.MINOR_VERSION =", "access texture maps from the geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get(", "\"context_gc\" self.objects = deque() @property def window(self) -> Window: \"\"\"", "context belongs to. 
:type: ``pyglet.Window`` \"\"\" return self._window_ref() @property def", "Union[str, None]: \"\"\"Check OpenGL error Returns a string representation of", "'static', 'dynamic' or 'stream' :rtype: :py:class:`~arcade.gl.Buffer` \"\"\" # create_with_size return", "Get or set the viewport for the currently active framebuffer.", "the current framebuffer size ctx.viewport = 0, 0, *ctx.screen.size :type:", "Linear interpolate LINEAR = 0x2601 #: Texture interpolation: Minification filter", "#: Maximum number of individual floating-point, integer, or boolean values", "typing import Optional from .texture import Texture from .query import", "= gl.GL_TRIANGLES_ADJACENCY # 12 #: Primitive mode TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY", "index_element_size: Byte size of the index buffer type. Can be", "#: Recommended maximum number of vertex array vertices self.MAX_ELEMENTS_VERTICES =", "texture data (optional). Can be bytes or an object supporting", "#: Number of words for vertex shader uniform variables in", "vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if source_fs else None, geometry_shader=source_geo.get_source(defines=defines) if source_geo else", "unsigned integer) \"\"\" return Geometry(self, content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size) def", "number of samples supported in integer format multisample buffers self.MAX_INTEGER_SAMPLES", "0, usage: str = \"static\" ) -> Buffer: \"\"\"Create a", "for flag in args: gl.glEnable(flag) def enable_only(self, *args): \"\"\" Enable", "# to avoid background color affecting areas outside the viewport", "Multiple flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags.update(args) for flag in args:", "framebuffer self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum number of samples in", "be used to make up a single patch primitive. 
Patch", "gl.glBlendFunc(value[0], value[1]) # def blend_equation(self) # def front_face(self) # def", "DST_ALPHA = 0x0304 #: Blend function ONE_MINUS_DST_ALPHA = 0x0305 #:", "= 0, 0, *ctx.screen.size :type: tuple (x, y, width, height)", "#: Maximum number of samples in a multisample depth or", "# Make sure only blending is enabled ctx.enable_only(ctx.BLEND) # Make", "(optional) :param str tess_control_shader: tessellation control shader source (optional) :param", "equations: Maximum of source and destination MAX = 0x8008 #", "mipmaps LINEAR_MIPMAP_NEAREST = 0x2701 #: Texture interpolation: Minification filter for", "the buffer protocol. \"\"\" return Texture(self, size, data=data, depth=True) def", "geometry shader uniform variables in all uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS =", "to render into :param arcade.gl.Texture depth_attachment: Depth texture :rtype: :py:class:`~arcade.gl.Framebuffer`", "Texture #: Texture interpolation: Nearest pixel NEAREST = 0x2600 #:", "tessellation evaluation shader source (optional) :param dict defines: Substitute #defines", "feedback. # When a geometry shader is present the out", "through ``window.ctx``. The Context class contains methods for creating resources,", "``ONE, ONE`` BLEND_ADDITIVE = 0x0001, 0x0001 #: Blend mode shortcut", "= 0, 0, 800, 600 # 1080p ctx.viewport = 0,", "= self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum supported texture image units that can", "self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum number of color attachments in a framebuffer", "(0, 0) self.framebuffer = (0, 0) self.buffer = (0, 0)", "are: # default: Auto ctx.gc_mode = \"auto\" \"\"\" return self._gc_mode", "module. (``ctx.BLEND`` or ``arcade.gl.BLEND``). \"\"\" #: The active context active:", "number of uniform blocks per vertex shader. 
self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS)", "= self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum number of samples supported", "mode=mode, index_element_size=index_element_size) def program( self, *, vertex_shader: str, fragment_shader: str", "reserve: The number of bytes reserve :param str usage: Buffer", "def program( self, *, vertex_shader: str, fragment_shader: str = None,", "Geomtry instance. :param list content: List of :py:class:`~arcade.gl.BufferDescription` (optional) :param", "= 0x2600 #: Texture interpolation: Linear interpolate LINEAR = 0x2601", "Number of words for vertex shader uniform variables in all", "#: Maximum supported size for renderbuffers self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #:", "currently active framebuffer. The viewport simply describes what pixels of", "12 #: Primitive mode TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY # 13 #:", "= self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum number of individual floating-point, integer, or", "happened while querying of limits. Moving on ..\") def get(self,", "..\") def get(self, enum: gl.GLenum) -> int: \"\"\"Get an integer", "wrap_x: gl.GLenum = None, wrap_y: gl.GLenum = None, filter: Tuple[gl.GLenum,", "number of bytes reserve :param str usage: Buffer usage. 'static',", "= self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum number of uniform blocks per vertex", "active one\"\"\" cls.active = ctx def enable(self, *args): \"\"\" Enables", "= 0x2702 #: Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_LINEAR", "source + destination FUNC_ADD = 0x8006 #: Blend equations: source", ":py:class:`~arcade.gl.Program` given the vertex, fragment and geometry shader. 
:param str", ":type: str \"\"\" err = gl.glGetError() if err == gl.GL_NO_ERROR:", "framebuffer:: # 4:3 screen ctx.viewport = 0, 0, 800, 600", "value: int): self._primitive_restart_index = value gl.glPrimitiveRestartIndex(value) def finish(self) -> None:", "source_geo = ( ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if geometry_shader else None )", "ctx): self._ctx = ctx #: Minor version number of the", "of uniform blocks per fragment shader. self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #:", "#: Primitive mode TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP # 5 #: Primitive", "vertex array indices self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended maximum number", "#: Blend function DST_ALPHA = 0x0304 #: Blend function ONE_MINUS_DST_ALPHA", "same value as the viewport # to avoid background color", "all uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum", "0, *ctx.screen.size :type: tuple (x, y, width, height) \"\"\" return", "\"auto\" \"\"\" return self._gc_mode @gc_mode.setter def gc_mode(self, value: str): modes", "rendering calls in opengl. :rtype: :py:class:`~arcade.gl.Query` \"\"\" return Query(self) class", "= self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum number of uniform buffer binding points", "This should always be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set primitive restart", "modes are: # default: Auto ctx.gc_mode = \"auto\" \"\"\" return", "# Loop the array until all objects are gone. #", "= self.get_str(gl.GL_RENDERER) #: Value indicating the number of sample buffers", "screen(self) -> Framebuffer: \"\"\" The framebuffer for the window. 
:type:", "of vertex array vertices self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum number", "# VertexArray: Primitives #: Primitive mode POINTS = gl.GL_POINTS #", "wrap mode: Repeat REPEAT = gl.GL_REPEAT # Texture wrap mode:", "4 #: Primitive mode TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP # 5 #:", "window's framebuffer:: # 4:3 screen ctx.viewport = 0, 0, 800,", "components: Number of components (1: R, 2: RG, 3: RGB,", "and geometry shader. :param str vertex_shader: vertex shader source :param", "flag: Depth testing DEPTH_TEST = gl.GL_DEPTH_TEST #: Context flag: Face", "None, tess_control_shader: str = None, tess_evaluation_shader: str = None, defines:", "self._point_size @point_size.setter def point_size(self, value: float): gl.glPointSize(self._point_size) self._point_size = value", "key, self.warn_threshold, created, freed, created - freed, ) def decr(self,", "gl.GL_PROGRAM_POINT_SIZE # Blend functions #: Blend function ZERO = 0x0000", "NEAREST_MIPMAP_NEAREST = 0x2700 #: Texture interpolation: Minification filter for mipmaps", "Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters:", ":param GLenum wrap_x: How the texture wraps in x direction", "freed)) if created % self.warn_threshold == 0 and created >", "attachments in a framebuffer self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum number", "index to -1 by default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index = -1 self.primitive_restart_index", "up a single patch primitive. Patch primitives are consumed by", "an array texture, and must be at least 256 self.MAX_ARRAY_TEXTURE_LAYERS", "largest rectangular texture that the GL can handle self.MAX_RECTANGLE_TEXTURE_SIZE =", "what context profile is used (core, compat etc.) 
self.CONTEXT_PROFILE_MASK =", "self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #: Maximum number of individual", "in python) self._gc_mode = \"auto\" self.gc_mode = gc_mode #: Collected", "int, int, int]: \"\"\" Get or set the viewport for", "is used (core, compat etc.) self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum", "source FUNC_REVERSE_SUBTRACT = 0x800B #: Blend equations: Minimum of source", "%s] [freed = %s] [active = %s]\", key, self.warn_threshold, created,", "all OpenGL rendering commands are completed\"\"\" gl.glFinish() # --- Resource", "= None, defines: Dict[str, str] = None ) -> Program:", "weakref.ref(window) self.limits = Limits(self) self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self) #", "vertex shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #: Maximum number", "\"\"\" Run garbage collection of OpenGL objects for this context.", "#: Major version number of the OpenGL API supported by", "OpenGL objects for this context. This is only needed when", "Number of words for geometry shader uniform variables in all", "\"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", }", "context. 
This context belongs to a ``pyglet.Window`` normally accessed through", "interpolation: Linear interpolate LINEAR = 0x2601 #: Texture interpolation: Minification", "TRIANGLE_FAN = gl.GL_TRIANGLE_FAN # 6 #: Primitive mode LINES_ADJACENCY =", "gl.glGetError() if err == gl.GL_NO_ERROR: return None return self._errors.get(err, \"GL_UNKNOWN_ERROR\")", "fragment shader self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum number of active", "\"\"\" Get or the blend function:: ctx.blend_func = ctx.ONE, ctx.ONE", "individual floating-point, integer, or boolean values that can be #:", "if source_geo: out_attributes = source_geo.out_attributes else: out_attributes = source_vs.out_attributes return", "framebuffer size ctx.viewport = 0, 0, *ctx.screen.size :type: tuple (x,", "texture that the GL can handle self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE) #:", "function DST_COLOR = 0x0306 #: Blend function ONE_MINUS_DST_COLOR = 0x0307", "Minification filter for mipmaps LINEAR_MIPMAP_NEAREST = 0x2701 #: Texture interpolation:", "texture( self, size: Tuple[int, int], *, components: int = 4,", "buffers self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum samples for a framebuffer", "areas outside the viewport gl.glEnable(gl.GL_SCISSOR_TEST) # States self._blend_func = self.BLEND_DEFAULT", "function DST_ALPHA = 0x0304 #: Blend function ONE_MINUS_DST_ALPHA = 0x0305", "objects are gone. 
# Deleting one object might add new", "shortcut for premultipled alpha: ``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001", "self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform blocks per", "number of color attachments in a framebuffer self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS)", "self.objects.pop() obj.delete() @property def gc_mode(self) -> str: \"\"\" Set the", "Check if a context flag is enabled :type: bool \"\"\"", "@property def primitive_restart_index(self) -> int: \"\"\"Get or set the primitive", "a vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum number of", "self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum number of active draw buffers when using", "def get_float(self, enum) -> float: \"\"\"Get a float limit\"\"\" value", "out attributes will be located there out_attributes = [] #", "def viewport(self, value: Tuple[int, int, int, int]): self.active_framebuffer.viewport = value", "= ctx.ONE, ctx.ONE :type: tuple (src, dst) \"\"\" return self._blend_func", ":param dict defines: Substitute #defines values in the source (optional)", "in the texel array of a texture buffer object self.MAX_TEXTURE_BUFFER_SIZE", "self.framebuffer = (0, 0) self.buffer = (0, 0) self.program =", "destination FUNC_ADD = 0x8006 #: Blend equations: source - destination", "setattr(self, key, (created, freed + 1)) class Limits: \"\"\"OpenGL Limitations\"\"\"", "in args: gl.glEnable(flag) def enable_only(self, *args): \"\"\" Enable only some", "= self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum supported size for renderbuffers self.MAX_RENDERBUFFER_SIZE =", "mode (optional) :param int mode: The default draw mode (optional)", "blocks per geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum number", "ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags.update(args) for flag in args: gl.glEnable(flag) def", "for 
a geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #:", "ContextStats(warn_threshold=1000) # Hardcoded states # This should always be enabled", "usage=usage) def framebuffer( self, *, color_attachments: Union[Texture, List[Texture]] = None,", "gc_mode #: Collected objects to gc when gc_mode is \"context_gc\"", "-> Tuple[int, int]: \"\"\" The OpenGL version as a 2", "= self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform blocks", "context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The value gives a rough", "None ) # If we don't have a fragment shader", "doing transform feedback. # When a geometry shader is present", "that can #: be held in uniform variable storage for", "filter=filter, ) def depth_texture(self, size: Tuple[int, int], *, data=None) ->", "by the current context self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION) #: Major version", "self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum number of uniform blocks per", "window(self) -> Window: \"\"\" The window this context belongs to.", "mask words self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum number of texels", "cull_face(self) @property def patch_vertices(self) -> int: \"\"\" Get or set", "MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT # Flags #: Context flag: Blending BLEND", "Supported modes are:\", modes) self._gc_mode = value @property def error(self)", "int], *, components: int = 4, dtype: str = \"f1\",", "#: Texture interpolation: Linear interpolate LINEAR = 0x2601 #: Texture", "geometry shader is present the out attributes will be located", "# 11 #: Primitive mode TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY # 12", "#: Maximum number of samples supported in integer format multisample", "set() # Normal garbage collection as default (what we expect", "reserve :param str usage: Buffer usage. 
'static', 'dynamic' or 'stream'", "= 0x0001 #: Blend function SRC_COLOR = 0x0300 #: Blend", "self.active_framebuffer @property def gl_version(self) -> Tuple[int, int]: \"\"\" The OpenGL", "or boolean values that can #: be held in uniform", "else None ) # If we don't have a fragment", "#defines values in the source (optional) :rtype: :py:class:`~arcade.gl.Program` \"\"\" source_vs", "value as the viewport # to avoid background color affecting", "objects for this context. This is only needed when ``gc_mode``", "freed = getattr(self, key) setattr(self, key, (created, freed + 1))", "texture that the GL can handle self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #:", "#: Primitive mode LINES_ADJACENCY = gl.GL_LINES_ADJACENCY # 10 #: Primitive", "= self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum number of components of output written", "modes: raise ValueError(\"Unsupported gc_mode. Supported modes are:\", modes) self._gc_mode =", "gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return value.value @patch_vertices.setter def patch_vertices(self, value: int): if", "states are not lingering from other sections of your code", "index_element_size: int = 4, ): \"\"\" Create a Geomtry instance.", "Maximum number of components of outputs written by a geometry", "None ) source_geo = ( ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if geometry_shader else", "VertexArray from .framebuffer import Framebuffer, DefaultFrameBuffer from typing import Optional", "interpolation: Nearest pixel NEAREST = 0x2600 #: Texture interpolation: Linear", "a Framebuffer. 
:param List[arcade.gl.Texture] color_attachments: List of textures we want", "self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION) self.VENDOR = self.get_str(gl.GL_VENDOR) self.RENDERER = self.get_str(gl.GL_RENDERER) #:", ":param Tuple[GLenum,GLenum] filter: Minification and magnification filter \"\"\" return Texture(", "self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION) #: Major version number of the OpenGL", "a 2 component tuple :type: tuple (major, minor) version \"\"\"", "function ONE_MINUS_DST_COLOR = 0x0307 # Blend equations #: source +", "NEAREST = 0x2600 #: Texture interpolation: Linear interpolate LINEAR =", "Blend equations #: source + destination FUNC_ADD = 0x8006 #:", "in self._flags @property def viewport(self) -> Tuple[int, int, int, int]:", "ONE_MINUS_DST_ALPHA = 0x0305 #: Blend function DST_COLOR = 0x0306 #:", "if a context flag is enabled :type: bool \"\"\" return", "content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size) def program( self, *, vertex_shader: str,", "= c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return value.value @patch_vertices.setter def patch_vertices(self, value:", "shader we are doing transform feedback. # When a geometry", "\"\"\" Create a Geomtry instance. :param list content: List of", "This context belongs to a ``pyglet.Window`` normally accessed through ``window.ctx``.", "self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #: Number of words for geometry shader", "#: Maximum number of uniform blocks per program self.MAX_COMBINED_UNIFORM_BLOCKS =", "4, ): \"\"\" Create a Geomtry instance. :param list content:", "flag: Enable ``gl_PointSize`` in shaders. 
PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE # Blend", "Flags #: Context flag: Blending BLEND = gl.GL_BLEND #: Context", "#: Blend equations: source - destination FUNC_SUBTRACT = 0x800A #:", "= self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum number of samples in a multisample", "maximum number of layers allowed in an array texture, and", "(if present) and subsequently used for tessellation. :type: int \"\"\"", "#: be held in uniform variable storage for a geometry", "limits. Moving on ..\") def get(self, enum: gl.GLenum) -> int:", "most commonly used OpenGL constants # Texture #: Texture interpolation:", "/ i1, i2, i4 / u1, u2, u4 :param Any", "self._blend_func @blend_func.setter def blend_func(self, value: Tuple[int, int]): self._blend_func = value", "is a simple way to ensure that context flag states", "object supporting the buffer protocol. :param GLenum wrap_x: How the", "#: Patch mode (tessellation) PATCHES = gl.GL_PATCHES # The most", "= gl.GL_MIRRORED_REPEAT # Flags #: Context flag: Blending BLEND =", "None, index_element_size: int = 4, ): \"\"\" Create a Geomtry", "size of the window's framebuffer:: # 4:3 screen ctx.viewport =", "value not in modes: raise ValueError(\"Unsupported gc_mode. Supported modes are:\",", "logging.getLogger(__name__) class Context: \"\"\" Represents an OpenGL context. This context", "object supporting the buffer protocol. 
\"\"\" return Texture(self, size, data=data,", ":param list content: List of :py:class:`~arcade.gl.BufferDescription` (optional) :param Buffer index_buffer:", "don't have a fragment shader we are doing transform feedback.", "for varying variables self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum number of", "else None, geometry_shader=source_geo.get_source(defines=defines) if source_geo else None, tess_control_shader=source_tc.get_source(defines=defines) if source_tc", "dst) \"\"\" return self._blend_func @blend_func.setter def blend_func(self, value: Tuple[int, int]):", ":py:class:`~arcade.gl.Framebuffer` \"\"\" return Framebuffer( self, color_attachments=color_attachments, depth_attachment=depth_attachment ) def texture(", "# Texture wrap mode: Repeat mirrored MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT #", ") #: Maximum number of uniform blocks per geometry shader", "#: Maximum number of simultaneous outputs that may be written", "ZERO = 0x0000 #: Blend function ONE = 0x0001 #:", "[] # type: List[str] if not source_fs: if source_geo: out_attributes", "Buffer object. :param Any data: The buffer data, This can", "gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #: A rough estimate of the largest cube-map", "pyglet from pyglet.window import Window from pyglet import gl from", "\"\"\"Create a new OpenGL Buffer object. 
:param Any data: The", "of a texture buffer object self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum", "# Make sure only depth test and culling is enabled", "= None, depth_attachment: Texture = None ) -> Framebuffer: \"\"\"Create", "= (0, 0) self.geometry = (0, 0) def incr(self, key):", "shortcut for default blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT = 0x0302,", "output written by a vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #:", "# Flags #: Context flag: Blending BLEND = gl.GL_BLEND #:", "fragment shader uniform variables in all uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS =", "texture that the GL can handle. The value must be", "self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum number of samples in a color multisample", "``gl`` module. (``ctx.BLEND`` or ``arcade.gl.BLEND``). \"\"\" #: The active context", "TODO: Missing in pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err =", "gl.GL_TRIANGLE_FAN # 6 #: Primitive mode LINES_ADJACENCY = gl.GL_LINES_ADJACENCY #", "self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum number of components of output", "def front_face(self) # def cull_face(self) @property def patch_vertices(self) -> int:", "read-only :type: :py:class:`arcade.gl.Framebuffer` \"\"\" return self.active_framebuffer @property def gl_version(self) ->", "buffer type. Can be 1, 2 or 4 (8, 16", "def viewport(self) -> Tuple[int, int, int, int]: \"\"\" Get or", "subsequently used for tessellation. 
:type: int \"\"\" value = c_int()", "number of components of the inputs read by the fragment", "gives a rough estimate of the largest texture that the", "self._flags: gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else:", "self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #: Maximum number of 4-vectors that may", "Primitive mode TRIANGLES = gl.GL_TRIANGLES # 4 #: Primitive mode", "a new OpenGL Buffer object. :param Any data: The buffer", "key, (created + 1, freed)) if created % self.warn_threshold ==", "u4 :param Any data: The texture data (optional). Can be", "+ destination FUNC_ADD = 0x8006 #: Blend equations: source -", "Tuple[int, int], *, components: int = 4, dtype: str =", "#: Blend function ONE_MINUS_SRC_COLOR = 0x0301 #: Blend function SRC_ALPHA", "\"\"\" Check if a context flag is enabled :type: bool", "used (core, compat etc.) self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum required", "= 0x0001, 0x0001 #: Blend mode shortcut for premultipled alpha:", "context. This is only needed when ``gc_mode`` is ``context_gc``. \"\"\"", "return self.active_framebuffer @property def gl_version(self) -> Tuple[int, int]: \"\"\" The", "uniform blocks per geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum", "return flag in self._flags @property def viewport(self) -> Tuple[int, int,", "( ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if fragment_shader else None ) source_geo =", "if source_fs else None, geometry_shader=source_geo.get_source(defines=defines) if source_geo else None, tess_control_shader=source_tc.get_source(defines=defines)", "None # --- Store the most commonly used OpenGL constants", "accessed through ``window.ctx``. 
The Context class contains methods for creating", "if tess_evaluation_shader else None ) # If we don't have", "FUNC_ADD = 0x8006 #: Blend equations: source - destination FUNC_SUBTRACT", "self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum number of samples supported in", "4, dtype: str = \"f1\", data: Any = None, wrap_x:", "def decr(self, key): created, freed = getattr(self, key) setattr(self, key,", "self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum number of uniform buffer binding", "dtype: str = \"f1\", data: Any = None, wrap_x: gl.GLenum", "2D depth texture :param Tuple[int, int] size: The size of", "depth_attachment: Depth texture :rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\" return Framebuffer( self, color_attachments=color_attachments,", "a float limit\"\"\" value = c_float() gl.glGetFloatv(enum, value) return value.value", "of color attachments in a framebuffer self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #:", "source (optional) :param str tess_evaluation_shader: tessellation evaluation shader source (optional)", "= 0x800A #: Blend equations: destination - source FUNC_REVERSE_SUBTRACT =", "object. :param Any data: The buffer data, This can be", "also exist in the ``gl`` module. (``ctx.BLEND`` or ``arcade.gl.BLEND``). \"\"\"", "#: Blend equations: Minimum of source and destination MIN =", "type: List[str] if not source_fs: if source_geo: out_attributes = source_geo.out_attributes", "least 256 self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A rough estimate of", "or set the primitive restart index. 
Default is -1\"\"\" return", "= { gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\",", "fragment_shader: str = None, geometry_shader: str = None, tess_control_shader: str", "= self.get(gl.GL_SUBPIXEL_BITS) #: A mask value indicating what context profile", "Program( self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if source_fs else None, geometry_shader=source_geo.get_source(defines=defines) if", "only blending is enabled ctx.enable_only(ctx.BLEND) # Make sure only depth", "blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #: A rough estimate", "self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #: Maximum number of individual floating-point, integer,", "\"\"\"Get or set the primitive restart index. Default is -1\"\"\"", "component: f1, f2, f4 / i1, i2, i4 / u1,", "new ones so we need while len(self.objects): obj = self.objects.pop()", "\"\"\" return Texture(self, size, data=data, depth=True) def geometry( self, content:", "self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if source_fs else None, geometry_shader=source_geo.get_source(defines=defines) if source_geo", "self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #: Maximum supported texture image", "# 10 #: Primitive mode LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY # 11", "arcade.gl.Texture depth_attachment: Depth texture :rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\" return Framebuffer( self,", "be #: held in uniform variable storage for a fragment", "The default draw mode (optional) :param int mode: The default", "all other flags. 
This is a simple way to ensure", "geometry( self, content: Optional[Sequence[BufferDescription]] = None, index_buffer: Buffer = None,", "texels allowed in the texel array of a texture buffer", "that the GL can handle self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum", "= self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #: Maximum number of individual floating-point,", "boolean values that can #: be held in uniform variable", "# create_with_size return Buffer(self, data, reserve=reserve, usage=usage) def framebuffer( self,", "the vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #: Maximum", "object for measuring rendering calls in opengl. :rtype: :py:class:`~arcade.gl.Query` \"\"\"", "Detect the default framebuffer self._screen = DefaultFrameBuffer(self) # Tracking active", "value.value def get_float(self, enum) -> float: \"\"\"Get a float limit\"\"\"", "uniform blocks per program self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number of", "ctx.viewport = 0, 0, 1920, 1080 # Using the current", ") def texture( self, size: Tuple[int, int], *, components: int", "or boolean values that #: can be held in uniform", "interpolation: Minification filter for mipmaps NEAREST_MIPMAP_LINEAR = 0x2702 #: Texture", "variables in all uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS )", "the GL can handle self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum number", "filter for mipmaps LINEAR_MIPMAP_LINEAR = 0x2703 #: Texture wrap mode:", "self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum supported texture image units that can be", "Collected objects to gc when gc_mode is \"context_gc\" self.objects =", "value: Tuple[int, int, int, int]): self.active_framebuffer.viewport = value @property def", "= gl.GL_DEPTH_TEST #: Context flag: Face culling CULL_FACE = 
gl.GL_CULL_FACE", "color CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER # Texture wrap mode: Repeat mirrored", "Primitive mode POINTS = gl.GL_POINTS # 0 #: Primitive mode", "#: A rough estimate of the largest 3D texture that", "currently active one\"\"\" cls.active = ctx def enable(self, *args): \"\"\"", "\"f1\", data: Any = None, wrap_x: gl.GLenum = None, wrap_y:", "primitive. Patch primitives are consumed by the tessellation control shader", "wraps in y direction :param Tuple[GLenum,GLenum] filter: Minification and magnification", "= self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum number of samples in a color", "incr(self, key): created, freed = getattr(self, key) setattr(self, key, (created", "number of samples in a multisample depth or depth-stencil texture", "the primitive restart index. Default is -1\"\"\" return self._primitive_restart_index @primitive_restart_index.setter", "# Blend equations #: source + destination FUNC_ADD = 0x8006", "None, tess_control_shader=source_tc.get_source(defines=defines) if source_tc else None, tess_evaluation_shader=source_te.get_source(defines=defines) if source_te else", "tess_evaluation_shader=source_te.get_source(defines=defines) if source_te else None, out_attributes=out_attributes, ) def query(self): \"\"\"", "uniform blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #: A rough", "interpolation: Minification filter for mipmaps LINEAR_MIPMAP_LINEAR = 0x2703 #: Texture", "Blend mode shortcut for premultipled alpha: ``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA =", "index_element_size=index_element_size) def program( self, *, vertex_shader: str, fragment_shader: str =", "int]: \"\"\" Get or set the viewport for the currently", "tuple :type: tuple (major, minor) version \"\"\" return self._gl_version def", "self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum number of individual floating-point, integer,", "activate(cls, ctx: 
\"Context\"): \"\"\"Mark a context as the currently active", "<filename>arcade/gl/context.py from ctypes import c_int, c_char_p, cast, c_float from collections", "texture :param int components: Number of components (1: R, 2:", "is \"context_gc\" self.objects = deque() @property def window(self) -> Window:", "in x direction :param GLenum wrap_y: How the texture wraps", ":py:class:`~arcade.gl.Program` \"\"\" source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs = ( ShaderSource(fragment_shader,", "for geometry shader uniform variables in all uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS", "0: LOG.debug( \"%s allocations passed threshold (%s) [created = %s]", "boolean values that can be held in uniform variable storage", "can handle self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum supported size for", "gl.GLenum) -> int: \"\"\"Get an integer limit\"\"\" value = c_int()", "def __init__(self, ctx): self._ctx = ctx #: Minor version number", "additive blending: ``ONE, ONE`` BLEND_ADDITIVE = 0x0001, 0x0001 #: Blend", "the most commonly used OpenGL constants # Texture #: Texture", "ctx.ONE, ctx.ONE :type: tuple (src, dst) \"\"\" return self._blend_func @blend_func.setter", "buffers associated with the framebuffer self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS) #: An", "\"\"\" The window this context belongs to. :type: ``pyglet.Window`` \"\"\"", "(core, compat etc.) 
self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum required alignment", "wrap_y: gl.GLenum = None, filter: Tuple[gl.GLenum, gl.GLenum] = None )", "height) \"\"\" return self.active_framebuffer.viewport @viewport.setter def viewport(self, value: Tuple[int, int,", "be 1, 2 or 4 (8, 16 or 32 bit", "None]: \"\"\"Check OpenGL error Returns a string representation of the", "self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES) #: A rough estimate of the largest", "the geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #: Maximum", "the largest cube-map texture that the GL can handle self.MAX_CUBE_MAP_TEXTURE_SIZE", "OpenGL constants # Texture #: Texture interpolation: Nearest pixel NEAREST", "``gc_mode`` is ``context_gc``. \"\"\" # Loop the array until all", "Context flag: Blending BLEND = gl.GL_BLEND #: Context flag: Depth", "2 or 4 (8, 16 or 32 bit unsigned integer)", "\"\"\" Enables one or more context flags:: # Single flag", "Minimum of source and destination MIN = 0x8007 #: Blend", "for mipmaps LINEAR_MIPMAP_NEAREST = 0x2701 #: Texture interpolation: Minification filter", "= self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A rough estimate of the largest 3D", "-= set(args) for flag in args: gl.glDisable(flag) def is_enabled(self, flag)", "enum: gl.GLenum) -> int: \"\"\"Get an integer limit\"\"\" value =", "storage for a geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS )", "0x0304 #: Blend function ONE_MINUS_DST_ALPHA = 0x0305 #: Blend function", "str = \"f1\", data: Any = None, wrap_x: gl.GLenum =", "active draw buffers when using dual-source blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS)", "collection of OpenGL objects for this context. 
This is only", "blend function:: ctx.blend_func = ctx.ONE, ctx.ONE :type: tuple (src, dst)", "maximum number of vertex array indices self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES) #:", "= gl.GL_LINE_STRIP # 3 #: Primitive mode TRIANGLES = gl.GL_TRIANGLES", "\"static\" ) -> Buffer: \"\"\"Create a new OpenGL Buffer object.", "the buffer protocol. :param GLenum wrap_x: How the texture wraps", "is the default render target self.active_framebuffer: Framebuffer = self._screen self.stats:", "dual-source blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended maximum number of", "scissor testing by default. # This is always set to", "individual 4-vectors of floating-point, integer, #: or boolean values that", "# Texture wrap mode: Clamp to border pixel CLAMP_TO_EDGE =", "str dtype: The data type of each component: f1, f2,", "#: Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_NEAREST = 0x2700", "= value @property def primitive_restart_index(self) -> int: \"\"\"Get or set", "``None`` of no errors has occurred. 
Example:: err = ctx.error", "-> Program: \"\"\"Create a :py:class:`~arcade.gl.Program` given the vertex, fragment and", "test and culling is enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags =", "#: Blend mode shortcut for premultipled alpha: ``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA", "uniform variables in all uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS", "Maximum supported size for renderbuffers self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum", "= weakref.ref(window) self.limits = Limits(self) self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self)", "data, This can be ``bytes`` or an object supporting the", "0x0001 #: Blend mode shortcut for premultipled alpha: ``SRC_ALPHA, ONE``", "points on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum size", ":param int reserve: The number of bytes reserve :param str", "Primitive mode TRIANGLE_FAN = gl.GL_TRIANGLE_FAN # 6 #: Primitive mode", "= gl.GL_BLEND #: Context flag: Depth testing DEPTH_TEST = gl.GL_DEPTH_TEST", "from .query import Query from .glsl import ShaderSource from .types", "avoid # affecting currently bound textures in the first units", "gc when gc_mode is \"context_gc\" self.objects = deque() @property def", "gl.GL_DEPTH_TEST #: Context flag: Face culling CULL_FACE = gl.GL_CULL_FACE #:", "= ContextStats(warn_threshold=1000) # Hardcoded states # This should always be", "1080p ctx.viewport = 0, 0, 1920, 1080 # Using the", "that #: can be held in uniform variable storage for", "uniform variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS)", "self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum number of uniform buffer binding points on", "#: Blend mode shortcut for default blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA``", 
"pyglet.window import Window from pyglet import gl from .buffer import", "fragment_shader else None ) source_geo = ( ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if", "-> None: \"\"\"Wait until all OpenGL rendering commands are completed\"\"\"", "= gl.GL_PATCHES # The most common error enums _errors =", "= self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number of words for vertex shader uniform", "words for fragment shader uniform variables in all uniform blocks", "self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number of words for vertex shader uniform variables", "self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum number of samples", "bytes or an object supporting the buffer protocol. :param GLenum", "inputs read by a geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS", "ctx.enable_only() # Make sure only blending is enabled ctx.enable_only(ctx.BLEND) #", "or an object supporting the buffer protocol. :param int reserve:", "source_geo: out_attributes = source_geo.out_attributes else: out_attributes = source_vs.out_attributes return Program(", "self._gl_version def gc(self): \"\"\" Run garbage collection of OpenGL objects", "gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND) if self.DEPTH_TEST in self._flags: gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST)", "fragment and geometry shader. :param str vertex_shader: vertex shader source", "fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #: Maximum number", "0x0302, 0x0303 #: Blend mode shortcut for additive blending: ``ONE,", "@property def gl_version(self) -> Tuple[int, int]: \"\"\" The OpenGL version", "#: Maximum number of uniform blocks per vertex shader. self.MAX_VERTEX_UNIFORM_BLOCKS", "vertex, fragment and geometry shader. 
:param str vertex_shader: vertex shader", "gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #: Maximum number of individual floating-point, integer, or", "Blend function ONE_MINUS_SRC_COLOR = 0x0301 #: Blend function SRC_ALPHA =", "[\"auto\", \"context_gc\"] if value not in modes: raise ValueError(\"Unsupported gc_mode.", "limit\"\"\" value = c_float() gl.glGetFloatv(enum, value) return value.value def get_str(self,", "gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\",", "index_buffer=index_buffer, mode=mode, index_element_size=index_element_size) def program( self, *, vertex_shader: str, fragment_shader:", "Run garbage collection of OpenGL objects for this context. This", "the out attributes will be located there out_attributes = []", "str geometry_shader: geometry shader source (optional) :param str tess_control_shader: tessellation", "self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum number of sample mask words self.MAX_SAMPLE_MASK_WORDS =", "program self.active_program: Optional[Program] = None # Tracking active framebuffer. 
On", "Store the most commonly used OpenGL constants # Texture #:", "# Tracking active program self.active_program: Optional[Program] = None # Tracking", "must be at least 256 self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A", "err = ctx.error if err: raise RuntimeError(\"OpenGL error: {err}\") :type:", "of uniform buffer binding points on the context self.MAX_UNIFORM_BUFFER_BINDINGS =", "= set() # Normal garbage collection as default (what we", "uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #: Number of", "i4 / u1, u2, u4 :param Any data: The texture", "Query(self) class ContextStats: def __init__(self, warn_threshold=100): self.warn_threshold = warn_threshold #", "Maximum number of color attachments in a framebuffer self.MAX_COLOR_ATTACHMENTS =", "= self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum number of 4-component generic vertex attributes", "else None, tess_control_shader=source_tc.get_source(defines=defines) if source_tc else None, tess_evaluation_shader=source_te.get_source(defines=defines) if source_te", "Number of components (1: R, 2: RG, 3: RGB, 4:", "self._primitive_restart_index = -1 self.primitive_restart_index = self._primitive_restart_index # We enable scissor", "flag: Face culling CULL_FACE = gl.GL_CULL_FACE #: Context flag: Enable", "# Texture unit we use when doing operations on textures", "tess_control_shader=source_tc.get_source(defines=defines) if source_tc else None, tess_evaluation_shader=source_te.get_source(defines=defines) if source_te else None,", "number of 4-vectors that may be held in uniform variable", "err == gl.GL_NO_ERROR: return None return self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod def", "be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set primitive restart index to -1", "# Set primitive restart index to -1 by default gl.glEnable(gl.GL_PRIMITIVE_RESTART)", "Patch primitives are consumed by the 
tessellation control shader (if", "from other sections of your code base:: # Ensure all", "texture wraps in x direction :param GLenum wrap_y: How the", "or 32 bit unsigned integer) \"\"\" return Geometry(self, content, index_buffer=index_buffer,", "(optional) :param str geometry_shader: geometry shader source (optional) :param str", "self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The value gives a rough estimate", "protocol. \"\"\" return Texture(self, size, data=data, depth=True) def geometry( self,", "a uniform block self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The number 4-vectors", "ctx.gc_mode = \"auto\" \"\"\" return self._gc_mode @gc_mode.setter def gc_mode(self, value:", "resolution #: that are used to position rasterized geometry in", "import Buffer from .program import Program from .vertex_array import Geometry,", "the viewport for the currently active framebuffer. The viewport simply", "blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #: Number of words", "index. Default is -1\"\"\" return self._primitive_restart_index @primitive_restart_index.setter def primitive_restart_index(self, value:", "``bytes`` or an object supporting the buffer protocol. :param int", "gl.glEnable(flag) def enable_only(self, *args): \"\"\" Enable only some flags. 
This", "not lingering from other sections of your code base:: #", "mode LINES_ADJACENCY = gl.GL_LINES_ADJACENCY # 10 #: Primitive mode LINE_STRIP_ADJACENCY", "@gc_mode.setter def gc_mode(self, value: str): modes = [\"auto\", \"context_gc\"] if", "self, content: Optional[Sequence[BufferDescription]] = None, index_buffer: Buffer = None, mode:", "0) def incr(self, key): created, freed = getattr(self, key) setattr(self,", "pixel NEAREST = 0x2600 #: Texture interpolation: Linear interpolate LINEAR", "+ 1, freed)) if created % self.warn_threshold == 0 and", ":type: :py:class:`~arcade.Framebuffer` \"\"\" return self._screen @property def fbo(self) -> Framebuffer:", "held in uniform variable storage for the vertex shader self.MAX_VERTEX_UNIFORM_VECTORS", "samples for a framebuffer self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES) #: A rough", "depth=True) def geometry( self, content: Optional[Sequence[BufferDescription]] = None, index_buffer: Buffer", "import warn warn(\"Error happened while querying of limits. Moving on", "value) @property def point_size(self) -> float: \"\"\"float: Get or set", "= (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self) # Texture unit we use when", "the currently active framebuffer. 
The viewport simply describes what pixels", "raise RuntimeError(\"OpenGL error: {err}\") :type: str \"\"\" err = gl.glGetError()", "(self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self) # Texture unit we use when doing", ":param str fragment_shader: fragment shader source (optional) :param str geometry_shader:", "self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The value gives a rough estimate of the", "``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST``", "#: or boolean values that can be held in uniform", "self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum number of uniform blocks per vertex shader.", "err = self._ctx.error if err: from warnings import warn warn(\"Error", "in opengl. :rtype: :py:class:`~arcade.gl.Query` \"\"\" return Query(self) class ContextStats: def", "0) self.vertex_array = (0, 0) self.geometry = (0, 0) def", "= 0x0304 #: Blend function ONE_MINUS_DST_ALPHA = 0x0305 #: Blend", "= self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The value gives a rough estimate of", "with the framebuffer self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS) #: An estimate of", "to position rasterized geometry in window coordinates self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS)", "at least 64 self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum number of", "from .texture import Texture from .query import Query from .glsl", "self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self) # Texture unit we use", "def query(self): \"\"\" Create a query object for measuring rendering", "__init__(self, ctx): self._ctx = ctx #: Minor version number of", "the largest 3D texture that the GL can handle. 
The", "self.CULL_FACE in self._flags: gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE in self._flags:", "gone. # Deleting one object might add new ones so", "= 0, usage: str = \"static\" ) -> Buffer: \"\"\"Create", "created - freed, ) def decr(self, key): created, freed =", "be at least 256 self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A rough", "#: the number of words for fragment shader uniform variables", "self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #: Value indicates the maximum number of", "value.value def get_str(self, enum: gl.GLenum) -> str: \"\"\"Get a string", "active framebuffer. The viewport simply describes what pixels of the", "doing operations on textures to avoid # affecting currently bound", "textures in the first units self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS - 1", "All enums also exist in the ``gl`` module. (``ctx.BLEND`` or", "self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum number of uniform buffer binding points on", "_errors = { gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION:", ":param int mode: The default draw mode (optional) :param int", "gl.GL_LINES # 1 #: Primitive mode LINE_STRIP = gl.GL_LINE_STRIP #", "a context flag is enabled :type: bool \"\"\" return flag", "y direction :param Tuple[GLenum,GLenum] filter: Minification and magnification filter \"\"\"", "0x0001 # VertexArray: Primitives #: Primitive mode POINTS = gl.GL_POINTS", "of outputs written by a geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get(", "points on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The value", "multisample depth or depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum", "generic vertex attributes accessible to a vertex shader. 
self.MAX_VERTEX_ATTRIBS =", "ctx.disable(ctx.BLEND) # Multiple flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags -= set(args)", "self.limits.MINOR_VERSION) Context.activate(self) # Texture unit we use when doing operations", "must be an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property def point_size(self) ->", "from warnings import warn warn(\"Error happened while querying of limits.", "binding points on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum", "used to access texture maps from the vertex shader. self.MAX_VERTEX_TEXTURE_IMAGE_UNITS", "= 0x8008 # Blend mode shortcuts #: Blend mode shortcut", "self.warn_threshold = warn_threshold # (created, freed) self.texture = (0, 0)", "estimate of the largest 3D texture that the GL can", "= self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #: Number of words for geometry", "global states and commonly used enums. All enums also exist", "components=components, data=data, dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y, filter=filter, ) def depth_texture(self, size:", "associated with the framebuffer self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS) #: An estimate", "(0, 0) self.geometry = (0, 0) def incr(self, key): created,", "Blend equations: Maximum of source and destination MAX = 0x8008", "texture :param Any data: The texture data (optional). Can be", "1, freed)) if created % self.warn_threshold == 0 and created", "flags are disabled (enable no flags) ctx.enable_only() # Make sure", "The data type of each component: f1, f2, f4 /", "self._ctx = ctx #: Minor version number of the OpenGL", ":param arcade.gl.Texture depth_attachment: Depth texture :rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\" return Framebuffer(", "POINTS = gl.GL_POINTS # 0 #: Primitive mode LINES =", "is ``context_gc``. 
\"\"\" # Loop the array until all objects", "# 5 #: Primitive mode TRIANGLE_FAN = gl.GL_TRIANGLE_FAN # 6", "OpenGL version as a 2 component tuple :type: tuple (major,", "border pixel CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE # Texture wrap mode: Clamp", "Framebuffer: \"\"\"Create a Framebuffer. :param List[arcade.gl.Texture] color_attachments: List of textures", "screen ctx.viewport = 0, 0, 800, 600 # 1080p ctx.viewport", "a 2D depth texture :param Tuple[int, int] size: The size", "The most common error enums _errors = { gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\",", "value @property def error(self) -> Union[str, None]: \"\"\"Check OpenGL error", "= None, filter: Tuple[gl.GLenum, gl.GLenum] = None ) -> Texture:", "flags. This will disable all other flags. This is a", "program self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number of words for vertex", "ctx #: Minor version number of the OpenGL API supported", "``gl_PointSize`` in shaders. PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE # Blend functions #:", "c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return value.value @patch_vertices.setter def patch_vertices(self, value: int):", "key) setattr(self, key, (created, freed + 1)) class Limits: \"\"\"OpenGL", "= 0, 0, 1920, 1080 # Using the current framebuffer", "for tessellation. :type: int \"\"\" value = c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value)", "commands are completed\"\"\" gl.glFinish() # --- Resource methods --- def", "the index buffer type. Can be 1, 2 or 4", "= \"static\" ) -> Buffer: \"\"\"Create a new OpenGL Buffer", "0x2702 #: Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_LINEAR =", "The active context active: Optional[\"Context\"] = None # --- Store", "+ 1)) class Limits: \"\"\"OpenGL Limitations\"\"\" def __init__(self, ctx): self._ctx", "OpenGL resources. 
Supported modes are: # default: Auto ctx.gc_mode =", "gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set primitive restart index to -1 by default", "The viewport simply describes what pixels of the screen OpenGL", "in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self, *args): \"\"\" Disable", "self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum number of 4-component generic vertex", "control shader (if present) and subsequently used for tessellation. :type:", "Primitive mode TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY # 12 #: Primitive mode", "framebuffer self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES) #: A rough estimate of the", "cube-map texture that the GL can handle self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE)", "RGBA) :param str dtype: The data type of each component:", "the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: The value gives a", "c_char_p, cast, c_float from collections import deque import logging import", "from ctypes import c_int, c_char_p, cast, c_float from collections import", "self._window_ref() @property def screen(self) -> Framebuffer: \"\"\" The framebuffer for", "at least 256 self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A rough estimate", "of each component: f1, f2, f4 / i1, i2, i4", "multisample texture self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the number of words", "supporting the buffer protocol. :param int reserve: The number of", "c_float from collections import deque import logging import weakref from", "self._screen self.stats: ContextStats = ContextStats(warn_threshold=1000) # Hardcoded states # This", "is only needed when ``gc_mode`` is ``context_gc``. 
\"\"\" # Loop", "set(args) for flag in args: gl.glDisable(flag) def is_enabled(self, flag) ->", "the inputs read by the fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get(", "self.limits = Limits(self) self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION) Context.activate(self) # Texture", "Normally it would be the size of the window's framebuffer::", ":param Any data: The texture data (optional). Can be bytes", "Byte size of the index buffer type. Can be 1,", "= None # --- Store the most commonly used OpenGL", "attributes will be located there out_attributes = [] # type:", "value gl.glBlendFunc(value[0], value[1]) # def blend_equation(self) # def front_face(self) #", "raise TypeError(\"patch_vertices must be an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property def", "gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum number of samples supported in integer", "query(self): \"\"\" Create a query object for measuring rendering calls", "= self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS)", ") -> Program: \"\"\"Create a :py:class:`~arcade.gl.Program` given the vertex, fragment", "source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs = ( ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if", "self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum number of uniform blocks per fragment shader.", "self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO: Missing in pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY)", "gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE in self._flags: gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE", "Clamp to border color CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER # Texture wrap", "source - destination FUNC_SUBTRACT = 0x800A #: Blend 
equations: destination", "completed\"\"\" gl.glFinish() # --- Resource methods --- def buffer( self,", "10 #: Primitive mode LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY # 11 #:", "\"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", } def __init__(self, window: pyglet.window.Window,", "the viewport gl.glEnable(gl.GL_SCISSOR_TEST) # States self._blend_func = self.BLEND_DEFAULT self._point_size =", "function SRC_COLOR = 0x0300 #: Blend function ONE_MINUS_SRC_COLOR = 0x0301", "= self.get(gl.GL_MINOR_VERSION) #: Major version number of the OpenGL API", "i2, i4 / u1, u2, u4 :param Any data: The", "== gl.GL_NO_ERROR: return None return self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod def activate(cls,", "enums also exist in the ``gl`` module. (``ctx.BLEND`` or ``arcade.gl.BLEND``).", "data=data, dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y, filter=filter, ) def depth_texture(self, size: Tuple[int,", "protocol. :param GLenum wrap_x: How the texture wraps in x", "gl.GL_TRIANGLE_STRIP # 5 #: Primitive mode TRIANGLE_FAN = gl.GL_TRIANGLE_FAN #", "number of the OpenGL API supported by the current context", "a ``pyglet.Window`` normally accessed through ``window.ctx``. 
The Context class contains", "value gives a rough estimate of the largest texture that", "of subpixel resolution #: that are used to position rasterized", "the maximum number of layers allowed in an array texture,", "of components of the inputs read by the fragment shader", "vertices self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum number of components of", "``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR`` :param", "storage for the vertex shader self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum", "self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The number 4-vectors for varying variables", "Blend function ONE_MINUS_DST_ALPHA = 0x0305 #: Blend function DST_COLOR =", "Maximum number of samples in a color multisample texture self.MAX_COLOR_TEXTURE_SAMPLES", "flag in self._flags @property def viewport(self) -> Tuple[int, int, int,", "first units self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS - 1 # Detect the", "self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum supported size for renderbuffers self.MAX_RENDERBUFFER_SIZE", "BLEND = gl.GL_BLEND #: Context flag: Depth testing DEPTH_TEST =", "value @property def primitive_restart_index(self) -> int: \"\"\"Get or set the", "self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum supported texture image", "default render target self.active_framebuffer: Framebuffer = self._screen self.stats: ContextStats =", "data, reserve=reserve, usage=usage) def framebuffer( self, *, color_attachments: Union[Texture, List[Texture]]", "present the out attributes will be located there out_attributes =", "of floating-point, integer, #: or boolean values that can be", "return Query(self) class ContextStats: 
def __init__(self, warn_threshold=100): self.warn_threshold = warn_threshold", "may be held in uniform variable storage for the vertex", "``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR`` :param Tuple[int, int] size: The", "mirrored MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT # Flags #: Context flag: Blending", "a query object for measuring rendering calls in opengl. :rtype:", "def depth_texture(self, size: Tuple[int, int], *, data=None) -> Texture: \"\"\"Create", "tess_control_shader: tessellation control shader source (optional) :param str tess_evaluation_shader: tessellation", "The buffer data, This can be ``bytes`` or an object", "#: Blend function DST_COLOR = 0x0306 #: Blend function ONE_MINUS_DST_COLOR", "can be held in uniform variable storage for a fragment", "self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform", "self._window_ref = weakref.ref(window) self.limits = Limits(self) self._gl_version = (self.limits.MAJOR_VERSION, self.limits.MINOR_VERSION)", "The number 4-vectors for varying variables self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS) #:", "gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #: Value indicates the maximum number of layers", "mode shortcut for additive blending: ``ONE, ONE`` BLEND_ADDITIVE = 0x0001,", "collection as default (what we expect in python) self._gc_mode =", "str: \"\"\" Set the garbage collection mode for OpenGL resources.", "ctx.viewport = 0, 0, *ctx.screen.size :type: tuple (x, y, width,", "the tessellation control shader (if present) and subsequently used for", ":type: int \"\"\" value = c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return value.value", "pixels of the screen OpenGL should render to. Normally it", "GLenum wrap_x: How the texture wraps in x direction :param", "fragment shader. 
self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum number of components", "ONE = 0x0001 #: Blend function SRC_COLOR = 0x0300 #:", "def window(self) -> Window: \"\"\" The window this context belongs", "most common error enums _errors = { gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE:", "float limit\"\"\" value = c_float() gl.glGetFloatv(enum, value) return value.value def", "for fragment shader uniform variables in all uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS", "get(self, enum: gl.GLenum) -> int: \"\"\"Get an integer limit\"\"\" value", "methods --- def buffer( self, *, data: Optional[Any] = None,", "return Program( self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if source_fs else None, geometry_shader=source_geo.get_source(defines=defines)", "ONE_MINUS_SRC_ALPHA = 0x0303 #: Blend function DST_ALPHA = 0x0304 #:", "#: Primitive mode TRIANGLES = gl.GL_TRIANGLES # 4 #: Primitive", "self._flags: gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND) if self.DEPTH_TEST in self._flags: gl.glEnable(self.DEPTH_TEST) else:", "by the fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #:", "flag is enabled :type: bool \"\"\" return flag in self._flags", "-> float: \"\"\"float: Get or set the point size.\"\"\" return", "ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if fragment_shader else None ) source_geo = (", "import ShaderSource from .types import BufferDescription LOG = logging.getLogger(__name__) class", "textures to avoid # affecting currently bound textures in the", "point_size(self, value: float): gl.glPointSize(self._point_size) self._point_size = value @property def primitive_restart_index(self)", "= 0x0302, 0x0303 #: Blend mode shortcut for additive blending:", "Texture(self, size, data=data, depth=True) def geometry( self, content: Optional[Sequence[BufferDescription]] =", 
"Minification filter for mipmaps NEAREST_MIPMAP_LINEAR = 0x2702 #: Texture interpolation:", "self.get_str(gl.GL_RENDERER) #: Value indicating the number of sample buffers associated", "a simple way to ensure that context flag states are", "list content: List of :py:class:`~arcade.gl.BufferDescription` (optional) :param Buffer index_buffer: Index/element", "str tess_evaluation_shader: tessellation evaluation shader source (optional) :param dict defines:", "array vertices self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum number of components", "def __init__(self, window: pyglet.window.Window, gc_mode: str = \"auto\"): self._window_ref =", "= self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform blocks", "shortcut for additive blending: ``ONE, ONE`` BLEND_ADDITIVE = 0x0001, 0x0001", "depth texture :param Tuple[int, int] size: The size of the", "source_geo else None, tess_control_shader=source_tc.get_source(defines=defines) if source_tc else None, tess_evaluation_shader=source_te.get_source(defines=defines) if", "None ) -> Framebuffer: \"\"\"Create a Framebuffer. :param List[arcade.gl.Texture] color_attachments:", "if err == gl.GL_NO_ERROR: return None return self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod", "uniform blocks per fragment shader. self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum", "buffer sizes and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #:", "= 0x0306 #: Blend function ONE_MINUS_DST_COLOR = 0x0307 # Blend", "= logging.getLogger(__name__) class Context: \"\"\" Represents an OpenGL context. 
This", ".glsl import ShaderSource from .types import BufferDescription LOG = logging.getLogger(__name__)", "gl.GLenum] = None ) -> Texture: \"\"\"Create a 2D Texture.", "a texture buffer object self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum number", "Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_NEAREST = 0x2700 #:", "gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self, *args): \"\"\" Disable one or more context", "if created % self.warn_threshold == 0 and created > 0:", "affecting areas outside the viewport gl.glEnable(gl.GL_SCISSOR_TEST) # States self._blend_func =", ".program import Program from .vertex_array import Geometry, VertexArray from .framebuffer", ":param str vertex_shader: vertex shader source :param str fragment_shader: fragment", "blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended maximum number of vertex", "value: float): gl.glPointSize(self._point_size) self._point_size = value @property def primitive_restart_index(self) ->", "vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum number of uniform", "Moving on ..\") def get(self, enum: gl.GLenum) -> int: \"\"\"Get", "Query from .glsl import ShaderSource from .types import BufferDescription LOG", ".buffer import Buffer from .program import Program from .vertex_array import", "in self._flags: gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE)", "BLEND_DEFAULT = 0x0302, 0x0303 #: Blend mode shortcut for additive", "as the viewport # to avoid background color affecting areas", "How the texture wraps in y direction :param Tuple[GLenum,GLenum] filter:", "0x0001 #: Blend function SRC_COLOR = 0x0300 #: Blend function", "if not source_fs: if source_geo: out_attributes = source_geo.out_attributes else: out_attributes", "0x2703 #: Texture wrap mode: 
Repeat REPEAT = gl.GL_REPEAT #", "Buffer = None, mode: int = None, index_element_size: int =", "current context self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION) #: Major version number of", "texture :param Tuple[int, int] size: The size of the texture", "gl.GL_CLAMP_TO_EDGE # Texture wrap mode: Clamp to border color CLAMP_TO_BORDER", "to -1 by default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index = -1 self.primitive_restart_index =", "Maximum number of uniform blocks per vertex shader. self.MAX_VERTEX_UNIFORM_BLOCKS =", "self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum required alignment for uniform buffer sizes and", "#: Blend equations: Maximum of source and destination MAX =", "held in uniform variable storage for a geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS", "LINE_STRIP = gl.GL_LINE_STRIP # 3 #: Primitive mode TRIANGLES =", "in uniform variable storage for a geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS =", "samples in a multisample depth or depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES =", "mode TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY # 13 #: Patch mode (tessellation)", "self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum number of uniform blocks per", "color_attachments=color_attachments, depth_attachment=depth_attachment ) def texture( self, size: Tuple[int, int], *,", "> 0: LOG.debug( \"%s allocations passed threshold (%s) [created =", "texture maps from the vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS", "of the largest texture that the GL can handle self.MAX_TEXTURE_SIZE", "the number of sample buffers associated with the framebuffer self.SAMPLE_BUFFERS", "exist in the ``gl`` module. (``ctx.BLEND`` or ``arcade.gl.BLEND``). 
\"\"\" #:", "depth or depth-stencil texture self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum number", "# Multiple flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags -= set(args) for", "args: gl.glEnable(flag) def enable_only(self, *args): \"\"\" Enable only some flags.", "#: Context flag: Face culling CULL_FACE = gl.GL_CULL_FACE #: Context", "ctx: \"Context\"): \"\"\"Mark a context as the currently active one\"\"\"", "of the largest 3D texture that the GL can handle.", "#: Primitive mode TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY # 12 #: Primitive", "List of :py:class:`~arcade.gl.BufferDescription` (optional) :param Buffer index_buffer: Index/element buffer (optional)", "shader self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #: Maximum number of", "shader self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum number of uniform blocks", "= self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err = self._ctx.error if err: from warnings import", "self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum size in basic machine units of a", "( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if tess_control_shader else None ) source_te =", "supported texture image units that can be used to access", "self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum number of uniform buffer binding", "self.limits.MAX_TEXTURE_IMAGE_UNITS - 1 # Detect the default framebuffer self._screen =", "Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR`` :param Tuple[int, int] size: The size", "mode (optional) :param int index_element_size: Byte size of the index", "of OpenGL objects for this context. 
This is only needed", "Maximum number of simultaneous outputs that may be written in", "wrap_y: How the texture wraps in y direction :param Tuple[GLenum,GLenum]", "\"\"\" return flag in self._flags @property def viewport(self) -> Tuple[int,", "Maximum number of samples supported in integer format multisample buffers", "\"\"\" self._flags = set(args) if self.BLEND in self._flags: gl.glEnable(self.BLEND) else:", "[active = %s]\", key, self.warn_threshold, created, freed, created - freed,", "#: Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_NEAREST = 0x2701", "#: Context flag: Depth testing DEPTH_TEST = gl.GL_DEPTH_TEST #: Context", "only depth test and culling is enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\"", "Maximum number of uniform blocks per program self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS)", "Blend function ONE_MINUS_SRC_ALPHA = 0x0303 #: Blend function DST_ALPHA =", "gl from .buffer import Buffer from .program import Program from", "int, int, int]): self.active_framebuffer.viewport = value @property def blend_func(self) ->", "Any, Dict, List, Tuple, Union, Sequence, Set import pyglet from", "or more context flags:: # Single flag ctx.disable(ctx.BLEND) # Multiple", "it would be the size of the window's framebuffer:: #", "#: maximum number of individual 4-vectors of floating-point, integer, #:", "2 component tuple :type: tuple (major, minor) version \"\"\" return", "created, freed, created - freed, ) def decr(self, key): created,", ":py:class:`~arcade.gl.Buffer` \"\"\" # create_with_size return Buffer(self, data, reserve=reserve, usage=usage) def", "= 4, dtype: str = \"f1\", data: Any = None,", "self.geometry = (0, 0) def incr(self, key): created, freed =", "component tuple :type: tuple (major, minor) version \"\"\" return self._gl_version", "enable(self, *args): \"\"\" Enables one or more context flags:: #", "#: Maximum supported texture image units that can be used", "a 2D Texture. 
Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying", "commonly used OpenGL constants # Texture #: Texture interpolation: Nearest", "``GL_CLAMP_TO_EDGE``, ``GL_CLAMP_TO_BORDER`` Minifying filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR``", "when using dual-source blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended maximum", "size of the texture :param int components: Number of components", "# --- Resource methods --- def buffer( self, *, data:", "maps from the vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS )", "gl.GL_TRIANGLES_ADJACENCY # 12 #: Primitive mode TRIANGLE_STRIP_ADJACENCY = gl.GL_TRIANGLE_STRIP_ADJACENCY #", "int] size: The size of the texture :param Any data:", "int: \"\"\"Get or set the primitive restart index. Default is", "# Ensure all flags are disabled (enable no flags) ctx.enable_only()", "Index/element buffer (optional) :param int mode: The default draw mode", "= ctx.error if err: raise RuntimeError(\"OpenGL error: {err}\") :type: str", "return Texture( self, size, components=components, data=data, dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y, filter=filter,", "# default: Auto ctx.gc_mode = \"auto\" \"\"\" return self._gc_mode @gc_mode.setter", "currently active framebuffer. 
This property is read-only :type: :py:class:`arcade.gl.Framebuffer` \"\"\"", "number of uniform blocks per program self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #:", "always set to the same value as the viewport #", "is enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags = set(args) if self.BLEND", "TRIANGLES = gl.GL_TRIANGLES # 4 #: Primitive mode TRIANGLE_STRIP =", "if self.PROGRAM_POINT_SIZE in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self, *args):", "self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #: A rough estimate of", "4-vectors for varying variables self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum number", "f4 / i1, i2, i4 / u1, u2, u4 :param", "allowed in an array texture, and must be at least", "#: Maximum number of components of the inputs read by", "current context. self.MAJOR_VERSION = self.get(gl.GL_MAJOR_VERSION) self.VENDOR = self.get_str(gl.GL_VENDOR) self.RENDERER =", "of layers allowed in an array texture, and must be", "texture maps from the vertex shader. 
self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS", "\"\"\" Get or set the viewport for the currently active", "are completed\"\"\" gl.glFinish() # --- Resource methods --- def buffer(", "rough estimate of the largest 3D texture that the GL", "size of the texture :param Any data: The texture data", "0 and created > 0: LOG.debug( \"%s allocations passed threshold", "ctx.blend_func = ctx.ONE, ctx.ONE :type: tuple (src, dst) \"\"\" return", "int, int]: \"\"\" Get or set the viewport for the", "Framebuffer( self, color_attachments=color_attachments, depth_attachment=depth_attachment ) def texture( self, size: Tuple[int,", "gl.GL_MIRRORED_REPEAT # Flags #: Context flag: Blending BLEND = gl.GL_BLEND", "for a vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #:", "a framebuffer self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES) #: A rough estimate of", "Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_NEAREST = 0x2701 #:", "enum) -> float: \"\"\"Get a float limit\"\"\" value = c_float()", "can handle self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum number of samples", "primitive restart index to -1 by default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index =", "data: The texture data (optional). Can be bytes or an", "[freed = %s] [active = %s]\", key, self.warn_threshold, created, freed,", "currently bound textures in the first units self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS", "the buffer protocol. 
:param int reserve: The number of bytes", "ShaderSource from .types import BufferDescription LOG = logging.getLogger(__name__) class Context:", ":py:class:`arcade.gl.Framebuffer` \"\"\" return self.active_framebuffer @property def gl_version(self) -> Tuple[int, int]:", "Tuple, Union, Sequence, Set import pyglet from pyglet.window import Window", "Maximum number of samples in a multisample depth or depth-stencil", "self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #: Maximum number of components", "gc_mode is \"context_gc\" self.objects = deque() @property def window(self) ->", "self.MAX_VERTEX_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #: Maximum number of individual", "#: A rough estimate of the largest rectangular texture that", "querying of limits. Moving on ..\") def get(self, enum: gl.GLenum)", "we use when doing operations on textures to avoid #", "testing DEPTH_TEST = gl.GL_DEPTH_TEST #: Context flag: Face culling CULL_FACE", "there out_attributes = [] # type: List[str] if not source_fs:", "\"\"\" self._flags -= set(args) for flag in args: gl.glDisable(flag) def", "# TODO: Missing in pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err", "magnification filter \"\"\" return Texture( self, size, components=components, data=data, dtype=dtype,", "\"GL_STACK_OVERFLOW\", } def __init__(self, window: pyglet.window.Window, gc_mode: str = \"auto\"):", "= None, tess_control_shader: str = None, tess_evaluation_shader: str = None,", "be located there out_attributes = [] # type: List[str] if", "of limits. Moving on ..\") def get(self, enum: gl.GLenum) ->", "of no errors has occurred. 
Example:: err = ctx.error if", "(%s) [created = %s] [freed = %s] [active = %s]\",", "gl.GL_NO_ERROR: return None return self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod def activate(cls, ctx:", "\"\"\"Mark a context as the currently active one\"\"\" cls.active =", "a geometry shader self.MAX_GEOMETRY_INPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #: Maximum", "geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum number of individual", "and magnification filter \"\"\" return Texture( self, size, components=components, data=data,", "self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #: Maximum number of individual floating-point, integer,", "0x0300 #: Blend function ONE_MINUS_SRC_COLOR = 0x0301 #: Blend function", "``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR``", "created, freed = getattr(self, key) setattr(self, key, (created, freed +", "= value @property def blend_func(self) -> Tuple[int, int]: \"\"\" Get", "Blend mode shortcut for additive blending: ``ONE, ONE`` BLEND_ADDITIVE =", "are doing transform feedback. # When a geometry shader is", "size: The size of the texture :param int components: Number", "= None, tess_evaluation_shader: str = None, defines: Dict[str, str] =", "Texture interpolation: Nearest pixel NEAREST = 0x2600 #: Texture interpolation:", "tessellation control shader source (optional) :param str tess_evaluation_shader: tessellation evaluation", "None # Tracking active framebuffer. On context creation the window", "int index_element_size: Byte size of the index buffer type. 
Can", "= (0, 0) def incr(self, key): created, freed = getattr(self,", "# self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err = self._ctx.error if err: from", "variable storage for a geometry shader self.MAX_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_UNIFORM_COMPONENTS", "None return self._errors.get(err, \"GL_UNKNOWN_ERROR\") @classmethod def activate(cls, ctx: \"Context\"): \"\"\"Mark", "source and destination MAX = 0x8008 # Blend mode shortcuts", "Dict, List, Tuple, Union, Sequence, Set import pyglet from pyglet.window", "Sequence, Set import pyglet from pyglet.window import Window from pyglet", ":param int components: Number of components (1: R, 2: RG,", "tess_evaluation_shader: tessellation evaluation shader source (optional) :param dict defines: Substitute", "vertices that will be used to make up a single", "filters: ``GL_NEAREST``, ``GL_LINEAR``, ``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters: ``GL_NEAREST``,", "error(self) -> Union[str, None]: \"\"\"Check OpenGL error Returns a string", "def get_str(self, enum: gl.GLenum) -> str: \"\"\"Get a string limit\"\"\"", "data: The buffer data, This can be ``bytes`` or an", "out_attributes = [] # type: List[str] if not source_fs: if", "return Texture(self, size, data=data, depth=True) def geometry( self, content: Optional[Sequence[BufferDescription]]", "number of vertices that will be used to make up", "for mipmaps NEAREST_MIPMAP_LINEAR = 0x2702 #: Texture interpolation: Minification filter", "*, vertex_shader: str, fragment_shader: str = None, geometry_shader: str =", "shader is present the out attributes will be located there", "@property def fbo(self) -> Framebuffer: \"\"\" Get the currently active", "that may be held in uniform variable storage for the", "def gc_mode(self) -> str: \"\"\" Set the garbage collection mode", "When a geometry shader is present the out attributes 
will", "int reserve: The number of bytes reserve :param str usage:", "the framebuffer self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS) #: An estimate of the", "11 #: Primitive mode TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY # 12 #:", "opengl. :rtype: :py:class:`~arcade.gl.Query` \"\"\" return Query(self) class ContextStats: def __init__(self,", "str tess_control_shader: tessellation control shader source (optional) :param str tess_evaluation_shader:", "0) self.framebuffer = (0, 0) self.buffer = (0, 0) self.program", "Texture = None ) -> Framebuffer: \"\"\"Create a Framebuffer. :param", "= source_vs.out_attributes return Program( self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if source_fs else", "0x2601 #: Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_NEAREST =", "# Detect the default framebuffer self._screen = DefaultFrameBuffer(self) # Tracking", "str = None, tess_evaluation_shader: str = None, defines: Dict[str, str]", "machine units of a uniform block self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #:", "mode: The default draw mode (optional) :param int index_element_size: Byte", "def cull_face(self) @property def patch_vertices(self) -> int: \"\"\" Get or", ") source_geo = ( ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if geometry_shader else None", "the number of words for fragment shader uniform variables in", "flag in args: gl.glDisable(flag) def is_enabled(self, flag) -> bool: \"\"\"", "ONE`` BLEND_ADDITIVE = 0x0001, 0x0001 #: Blend mode shortcut for", "Set the garbage collection mode for OpenGL resources. Supported modes", "the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum size in basic", "garbage collection of OpenGL objects for this context. 
This is", "mode: Repeat mirrored MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT # Flags #: Context", "be held in uniform variable storage for a geometry shader", "tessellation. :type: int \"\"\" value = c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return", "array until all objects are gone. # Deleting one object", "largest texture that the GL can handle self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE)", "tess_control_shader: str = None, tess_evaluation_shader: str = None, defines: Dict[str,", "This is always set to the same value as the", "in uniform variable storage for a vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS =", "Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_LINEAR = 0x2702 #:", "The window this context belongs to. :type: ``pyglet.Window`` \"\"\" return", "indicating what context profile is used (core, compat etc.) self.CONTEXT_PROFILE_MASK", "from .buffer import Buffer from .program import Program from .vertex_array", "= [\"auto\", \"context_gc\"] if value not in modes: raise ValueError(\"Unsupported", "number of components of outputs written by a geometry shader", "or set the viewport for the currently active framebuffer. The", "Texture wrap mode: Clamp to border pixel CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE", "geometry_shader: geometry shader source (optional) :param str tess_control_shader: tessellation control", "self.get(gl.GL_MAX_SAMPLES) #: A rough estimate of the largest rectangular texture", "0x800A #: Blend equations: destination - source FUNC_REVERSE_SUBTRACT = 0x800B", "for the currently active framebuffer. The viewport simply describes what", "a vertex shader. 
self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum supported texture", "create_with_size return Buffer(self, data, reserve=reserve, usage=usage) def framebuffer( self, *,", "0) self.buffer = (0, 0) self.program = (0, 0) self.vertex_array", "#: Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_LINEAR = 0x2703", "def gc_mode(self, value: str): modes = [\"auto\", \"context_gc\"] if value", "get_float(self, enum) -> float: \"\"\"Get a float limit\"\"\" value =", "offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #: Value indicates the", "for renderbuffers self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum number of sample", "warn(\"Error happened while querying of limits. Moving on ..\") def", "@property def gc_mode(self) -> str: \"\"\" Set the garbage collection", "creation the window is the default render target self.active_framebuffer: Framebuffer", "context flag is enabled :type: bool \"\"\" return flag in", "using dual-source blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended maximum number", "# We enable scissor testing by default. # This is", "the vertex, fragment and geometry shader. :param str vertex_shader: vertex", "-> bool: \"\"\" Check if a context flag is enabled", "\"\"\" return self.active_framebuffer @property def gl_version(self) -> Tuple[int, int]: \"\"\"", "error: {err}\") :type: str \"\"\" err = gl.glGetError() if err", "the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum number of uniform", "= -1 self.primitive_restart_index = self._primitive_restart_index # We enable scissor testing", "\"%s allocations passed threshold (%s) [created = %s] [freed =", "of uniform blocks per vertex shader. 
self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) #", "variables in all uniform blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS )", "ctx.error if err: raise RuntimeError(\"OpenGL error: {err}\") :type: str \"\"\"", "occurring error or ``None`` of no errors has occurred. Example::", "self, *, color_attachments: Union[Texture, List[Texture]] = None, depth_attachment: Texture =", "position rasterized geometry in window coordinates self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS) #:", "of textures we want to render into :param arcade.gl.Texture depth_attachment:", "in the source (optional) :rtype: :py:class:`~arcade.gl.Program` \"\"\" source_vs = ShaderSource(vertex_shader,", ") #: Maximum supported texture image units that can be", "self.MAX_COMBINED_UNIFORM_BLOCKS = self.get(gl.GL_MAX_COMBINED_UNIFORM_BLOCKS) #: Number of words for vertex shader", "#: Minor version number of the OpenGL API supported by", "vertex_shader: str, fragment_shader: str = None, geometry_shader: str = None,", "List, Tuple, Union, Sequence, Set import pyglet from pyglet.window import", "that context flag states are not lingering from other sections", "vertex shader self.MAX_VERTEX_UNIFORM_VECTORS = self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum number of components", "out_attributes = source_geo.out_attributes else: out_attributes = source_vs.out_attributes return Program( self,", "0x0306 #: Blend function ONE_MINUS_DST_COLOR = 0x0307 # Blend equations", "size: Tuple[int, int], *, data=None) -> Texture: \"\"\"Create a 2D", "of words for fragment shader uniform variables in all uniform", "shader (if present) and subsequently used for tessellation. :type: int", "sure only depth test and culling is enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE)", ") -> Framebuffer: \"\"\"Create a Framebuffer. 
:param List[arcade.gl.Texture] color_attachments: List", "= self.get(gl.GL_SAMPLE_BUFFERS) #: An estimate of the number of bits", "content: Optional[Sequence[BufferDescription]] = None, index_buffer: Buffer = None, mode: int", "belongs to. :type: ``pyglet.Window`` \"\"\" return self._window_ref() @property def screen(self)", "mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT = 0x0302, 0x0303 #: Blend mode", "FUNC_SUBTRACT = 0x800A #: Blend equations: destination - source FUNC_REVERSE_SUBTRACT", "(0, 0) self.vertex_array = (0, 0) self.geometry = (0, 0)", "testing by default. # This is always set to the", "of the largest cube-map texture that the GL can handle", "\"\"\" return Query(self) class ContextStats: def __init__(self, warn_threshold=100): self.warn_threshold =", "Get or the blend function:: ctx.blend_func = ctx.ONE, ctx.ONE :type:", "the array until all objects are gone. # Deleting one", "of components of output written by a vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS", "32 bit unsigned integer) \"\"\" return Geometry(self, content, index_buffer=index_buffer, mode=mode,", "storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS )", "mipmaps NEAREST_MIPMAP_NEAREST = 0x2700 #: Texture interpolation: Minification filter for", ":py:class:`~arcade.gl.BufferDescription` (optional) :param Buffer index_buffer: Index/element buffer (optional) :param int", "1080 # Using the current framebuffer size ctx.viewport = 0,", "The framebuffer for the window. 
:type: :py:class:`~arcade.Framebuffer` \"\"\" return self._screen", "@viewport.setter def viewport(self, value: Tuple[int, int, int, int]): self.active_framebuffer.viewport =", "Repeat mirrored MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT # Flags #: Context flag:", "size for renderbuffers self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum number of", "None, geometry_shader: str = None, tess_control_shader: str = None, tess_evaluation_shader:", "= self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum number of sample mask words self.MAX_SAMPLE_MASK_WORDS", "creating resources, global states and commonly used enums. All enums", "render into :param arcade.gl.Texture depth_attachment: Depth texture :rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\"", "DST_COLOR = 0x0306 #: Blend function ONE_MINUS_DST_COLOR = 0x0307 #", "by the tessellation control shader (if present) and subsequently used", "The texture data (optional). Can be bytes or an object", "pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err = self._ctx.error if err:", "else None ) source_geo = ( ShaderSource(geometry_shader, gl.GL_GEOMETRY_SHADER) if geometry_shader", "to avoid # affecting currently bound textures in the first", "(optional) :param dict defines: Substitute #defines values in the source", "= self._ctx.error if err: from warnings import warn warn(\"Error happened", "self.get(gl.GL_MAJOR_VERSION) self.VENDOR = self.get_str(gl.GL_VENDOR) self.RENDERER = self.get_str(gl.GL_RENDERER) #: Value indicating", "might add new ones so we need while len(self.objects): obj", "the same value as the viewport # to avoid background", "= \"f1\", data: Any = None, wrap_x: gl.GLenum = None,", "written by a vertex shader self.MAX_VERTEX_OUTPUT_COMPONENTS = self.get(gl.GL_MAX_VERTEX_OUTPUT_COMPONENTS) #: Maximum", "\"GL_UNKNOWN_ERROR\") @classmethod def activate(cls, ctx: \"Context\"): \"\"\"Mark a context as", "\"auto\" self.gc_mode = gc_mode #: Collected 
objects to gc when", "64 self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum number of color attachments", "need while len(self.objects): obj = self.objects.pop() obj.delete() @property def gc_mode(self)", "gl.GL_PATCHES # The most common error enums _errors = {", "Maximum number of sample mask words self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #:", "-> Framebuffer: \"\"\" The framebuffer for the window. :type: :py:class:`~arcade.Framebuffer`", "(major, minor) version \"\"\" return self._gl_version def gc(self): \"\"\" Run", "Any data: The buffer data, This can be ``bytes`` or", "way to ensure that context flag states are not lingering", "variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #:", "enabled :type: bool \"\"\" return flag in self._flags @property def", "#: source + destination FUNC_ADD = 0x8006 #: Blend equations:", "gc_mode. Supported modes are:\", modes) self._gc_mode = value @property def", "The Context class contains methods for creating resources, global states", "in self._flags: gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE in self._flags: gl.glEnable(self.CULL_FACE)", "Blend function ZERO = 0x0000 #: Blend function ONE =", "enable scissor testing by default. # This is always set", ") -> Texture: \"\"\"Create a 2D Texture. 
Wrap modes: ``GL_REPEAT``,", "number of vertex array indices self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended", "self, *, data: Optional[Any] = None, reserve: int = 0,", "ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags = set(args) if self.BLEND in self._flags:", "Texture wrap mode: Repeat mirrored MIRRORED_REPEAT = gl.GL_MIRRORED_REPEAT # Flags", "indicates the maximum number of layers allowed in an array", "ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT = 0x0302, 0x0303 #: Blend mode shortcut for", "gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #: Number of words for geometry shader uniform", "err: from warnings import warn warn(\"Error happened while querying of", "your code base:: # Ensure all flags are disabled (enable", "window: pyglet.window.Window, gc_mode: str = \"auto\"): self._window_ref = weakref.ref(window) self.limits", "patch_vertices(self, value: int): if not isinstance(value, int): raise TypeError(\"patch_vertices must", "= None ) -> Program: \"\"\"Create a :py:class:`~arcade.gl.Program` given the", "a geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #: Maximum", "#: Context flag: Enable ``gl_PointSize`` in shaders. PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE", "number of layers allowed in an array texture, and must", "affecting currently bound textures in the first units self.default_texture_unit =", "@property def point_size(self) -> float: \"\"\"float: Get or set the", "= 0x0303 #: Blend function DST_ALPHA = 0x0304 #: Blend", "if value not in modes: raise ValueError(\"Unsupported gc_mode. Supported modes", "flags. 
This is a simple way to ensure that context", "for mipmaps LINEAR_MIPMAP_LINEAR = 0x2703 #: Texture wrap mode: Repeat", "This property is read-only :type: :py:class:`arcade.gl.Framebuffer` \"\"\" return self.active_framebuffer @property", "The value must be at least 64 self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE)", "A mask value indicating what context profile is used (core,", "disable all other flags. This is a simple way to", "culling CULL_FACE = gl.GL_CULL_FACE #: Context flag: Enable ``gl_PointSize`` in", "the GL can handle self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE) #: Maximum supported", "setattr(self, key, (created + 1, freed)) if created % self.warn_threshold", "or 4 (8, 16 or 32 bit unsigned integer) \"\"\"", "shader self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #: Maximum number of", "disable(self, *args): \"\"\" Disable one or more context flags:: #", "256 self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS) #: A rough estimate of the", "shader source (optional) :param str tess_control_shader: tessellation control shader source", "while len(self.objects): obj = self.objects.pop() obj.delete() @property def gc_mode(self) ->", "self.texture = (0, 0) self.framebuffer = (0, 0) self.buffer =", "that are used to position rasterized geometry in window coordinates", ") def query(self): \"\"\" Create a query object for measuring", "Any = None, wrap_x: gl.GLenum = None, wrap_y: gl.GLenum =", "= self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum number of uniform buffer binding points", "viewport simply describes what pixels of the screen OpenGL should", "= self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #: Maximum number of components of", "Context.activate(self) # Texture unit we use when doing operations on", "#: Primitive mode TRIANGLE_FAN = gl.GL_TRIANGLE_FAN # 6 #: Primitive", "= ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs = ( 
ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if fragment_shader", "0x0307 # Blend equations #: source + destination FUNC_ADD =", "color attachments in a framebuffer self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum", "= self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum number of components of the inputs", "\"\"\" source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs = ( ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER)", "def primitive_restart_index(self, value: int): self._primitive_restart_index = value gl.glPrimitiveRestartIndex(value) def finish(self)", "Blend functions #: Blend function ZERO = 0x0000 #: Blend", "Dict[str, str] = None ) -> Program: \"\"\"Create a :py:class:`~arcade.gl.Program`", "consumed by the tessellation control shader (if present) and subsequently", "CULL_FACE = gl.GL_CULL_FACE #: Context flag: Enable ``gl_PointSize`` in shaders.", "Blending BLEND = gl.GL_BLEND #: Context flag: Depth testing DEPTH_TEST", "tess_control_shader else None ) source_te = ( ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER) if", "texel array of a texture buffer object self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE)", "0 #: Primitive mode LINES = gl.GL_LINES # 1 #:", "in the first units self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS - 1 #", "background color affecting areas outside the viewport gl.glEnable(gl.GL_SCISSOR_TEST) # States", "Repeat REPEAT = gl.GL_REPEAT # Texture wrap mode: Clamp to", "#: Blend mode shortcut for additive blending: ``ONE, ONE`` BLEND_ADDITIVE", "3 #: Primitive mode TRIANGLES = gl.GL_TRIANGLES # 4 #:", "#: The number 4-vectors for varying variables self.MAX_VARYING_VECTORS = self.get(gl.GL_MAX_VARYING_VECTORS)", "ONE`` BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001 # VertexArray: Primitives #: Primitive", "enum: gl.GLenum) -> str: \"\"\"Get a string limit\"\"\" return cast(gl.glGetString(enum),", "= 0x8006 #: Blend equations: 
source - destination FUNC_SUBTRACT =", "#: Blend function SRC_COLOR = 0x0300 #: Blend function ONE_MINUS_SRC_COLOR", "in a fragment shader self.MAX_DRAW_BUFFERS = self.get(gl.GL_MAX_DRAW_BUFFERS) #: Maximum number", "self.get(gl.GL_MAX_VERTEX_UNIFORM_VECTORS) #: Maximum number of components of output written by", "layers allowed in an array texture, and must be at", "= value gl.glPrimitiveRestartIndex(value) def finish(self) -> None: \"\"\"Wait until all", "can handle self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum number of uniform", "ValueError(\"Unsupported gc_mode. Supported modes are:\", modes) self._gc_mode = value @property", "can be held in uniform variable storage for a vertex", "LINEAR = 0x2601 #: Texture interpolation: Minification filter for mipmaps", "texture self.MAX_DEPTH_TEXTURE_SAMPLES = self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum number of simultaneous outputs", "import Texture from .query import Query from .glsl import ShaderSource", "= ( ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if fragment_shader else None ) source_geo", "= self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum number of color attachments in a", "other flags. 
This is a simple way to ensure that", "index_buffer: Buffer = None, mode: int = None, index_element_size: int", "gl.glGetFloatv(enum, value) return value.value def get_str(self, enum: gl.GLenum) -> str:", "if source_geo else None, tess_control_shader=source_tc.get_source(defines=defines) if source_tc else None, tess_evaluation_shader=source_te.get_source(defines=defines)", "self, size, components=components, data=data, dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y, filter=filter, ) def", "= None, wrap_x: gl.GLenum = None, wrap_y: gl.GLenum = None,", "# Hardcoded states # This should always be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS)", "self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum supported texture image units that", "we want to render into :param arcade.gl.Texture depth_attachment: Depth texture", "0x0302, 0x0001 # VertexArray: Primitives #: Primitive mode POINTS =", ":type: tuple (major, minor) version \"\"\" return self._gl_version def gc(self):", "= self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum supported texture image units", "= 0x2701 #: Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_LINEAR", "float: \"\"\"float: Get or set the point size.\"\"\" return self._point_size", "draw buffers when using dual-source blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #:", "self._flags @property def viewport(self) -> Tuple[int, int, int, int]: \"\"\"", "for creating resources, global states and commonly used enums. All", "Enable ``gl_PointSize`` in shaders. PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE # Blend functions", "context belongs to a ``pyglet.Window`` normally accessed through ``window.ctx``. 
The", "data: Optional[Any] = None, reserve: int = 0, usage: str", "self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #: Maximum number of color attachments in", "for uniform buffer sizes and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT", "set the primitive restart index. Default is -1\"\"\" return self._primitive_restart_index", "REPEAT = gl.GL_REPEAT # Texture wrap mode: Clamp to border", "Buffer: \"\"\"Create a new OpenGL Buffer object. :param Any data:", "the GL can handle self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum number", "are:\", modes) self._gc_mode = value @property def error(self) -> Union[str,", "value = c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return value.value @patch_vertices.setter def patch_vertices(self,", "outputs written by a geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS", "(created, freed) self.texture = (0, 0) self.framebuffer = (0, 0)", "from the geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #:", "): \"\"\" Create a Geomtry instance. :param list content: List", "should always be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set primitive restart index", "= self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO: Missing in pyglet", "-> int: \"\"\" Get or set number of vertices that", "property is read-only :type: :py:class:`arcade.gl.Framebuffer` \"\"\" return self.active_framebuffer @property def", "int mode: The default draw mode (optional) :param int index_element_size:", "def patch_vertices(self) -> int: \"\"\" Get or set number of", "usage: str = \"static\" ) -> Buffer: \"\"\"Create a new", "Depth testing DEPTH_TEST = gl.GL_DEPTH_TEST #: Context flag: Face culling", "what pixels of the screen OpenGL should render to. 
Normally", "self.SAMPLE_BUFFERS = self.get(gl.GL_SAMPLE_BUFFERS) #: An estimate of the number of", "an integer\") gl.glPatchParameteri(gl.GL_PATCH_VERTICES, value) @property def point_size(self) -> float: \"\"\"float:", "the window is the default render target self.active_framebuffer: Framebuffer =", "want to render into :param arcade.gl.Texture depth_attachment: Depth texture :rtype:", "texture self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the number of words for", "= self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum number of simultaneous outputs that may", "= self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum number of components of inputs read", "self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum number of individual floating-point, integer, or boolean", "gl.glFinish() # --- Resource methods --- def buffer( self, *,", "mode shortcuts #: Blend mode shortcut for default blend mode:", "self.active_framebuffer.viewport @viewport.setter def viewport(self, value: Tuple[int, int, int, int]): self.active_framebuffer.viewport", "Tuple[GLenum,GLenum] filter: Minification and magnification filter \"\"\" return Texture( self,", "default draw mode (optional) :param int mode: The default draw", "Primitive mode LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY # 11 #: Primitive mode", "query object for measuring rendering calls in opengl. :rtype: :py:class:`~arcade.gl.Query`", "has occurred. Example:: err = ctx.error if err: raise RuntimeError(\"OpenGL", "and offset self.UNIFORM_BUFFER_OFFSET_ALIGNMENT = self.get( gl.GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT ) #: Value indicates", "Window from pyglet import gl from .buffer import Buffer from", "bytes or an object supporting the buffer protocol. 
\"\"\" return", "function:: ctx.blend_func = ctx.ONE, ctx.ONE :type: tuple (src, dst) \"\"\"", "The size of the texture :param Any data: The texture", "@point_size.setter def point_size(self, value: float): gl.glPointSize(self._point_size) self._point_size = value @property", "= self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum samples for a framebuffer self.MAX_SAMPLES =", "bound textures in the first units self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS -", ".query import Query from .glsl import ShaderSource from .types import", "4:3 screen ctx.viewport = 0, 0, 800, 600 # 1080p", "self._gc_mode = \"auto\" self.gc_mode = gc_mode #: Collected objects to", "in pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY = self.get_float(gl.GL_MAX_TEXTURE_MAX_ANISOTROPY) err = self._ctx.error if", "mode TRIANGLES_ADJACENCY = gl.GL_TRIANGLES_ADJACENCY # 12 #: Primitive mode TRIANGLE_STRIP_ADJACENCY", "rectangular texture that the GL can handle self.MAX_RECTANGLE_TEXTURE_SIZE = self.get(gl.GL_MAX_RECTANGLE_TEXTURE_SIZE)", "= None, reserve: int = 0, usage: str = \"static\"", "- freed, ) def decr(self, key): created, freed = getattr(self,", "number of sample buffers associated with the framebuffer self.SAMPLE_BUFFERS =", "context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #: Maximum number of uniform buffer", "None, filter: Tuple[gl.GLenum, gl.GLenum] = None ) -> Texture: \"\"\"Create", "variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS", "= 1.0 self._flags: Set[int] = set() # Normal garbage collection", "evaluation shader source (optional) :param dict defines: Substitute #defines values", "def gl_version(self) -> Tuple[int, int]: \"\"\" The OpenGL version as", "size, data=data, depth=True) def geometry( self, content: Optional[Sequence[BufferDescription]] = None,", "premultipled alpha: ``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001 # VertexArray:", "of 
the window's framebuffer:: # 4:3 screen ctx.viewport = 0,", "vertex shader source :param str fragment_shader: fragment shader source (optional)", "= 0x2703 #: Texture wrap mode: Repeat REPEAT = gl.GL_REPEAT", "5 #: Primitive mode TRIANGLE_FAN = gl.GL_TRIANGLE_FAN # 6 #:", "self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #: A rough estimate of the largest", "only needed when ``gc_mode`` is ``context_gc``. \"\"\" # Loop the", "out_attributes = source_vs.out_attributes return Program( self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines) if source_fs", "#: Blend function ONE_MINUS_DST_COLOR = 0x0307 # Blend equations #:", "values in the source (optional) :rtype: :py:class:`~arcade.gl.Program` \"\"\" source_vs =", "Texture( self, size, components=components, data=data, dtype=dtype, wrap_x=wrap_x, wrap_y=wrap_y, filter=filter, )", "gl.glPointSize(self._point_size) self._point_size = value @property def primitive_restart_index(self) -> int: \"\"\"Get", "= 0x800B #: Blend equations: Minimum of source and destination", "the window's framebuffer:: # 4:3 screen ctx.viewport = 0, 0,", "number of components of output written by a vertex shader", "value must be at least 64 self.MAX_3D_TEXTURE_SIZE = self.get(gl.GL_MAX_3D_TEXTURE_SIZE) #:", "1920, 1080 # Using the current framebuffer size ctx.viewport =", "for measuring rendering calls in opengl. 
:rtype: :py:class:`~arcade.gl.Query` \"\"\" return", "source_fs else None, geometry_shader=source_geo.get_source(defines=defines) if source_geo else None, tess_control_shader=source_tc.get_source(defines=defines) if", "a fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #: maximum", "f1, f2, f4 / i1, i2, i4 / u1, u2,", "or set the point size.\"\"\" return self._point_size @point_size.setter def point_size(self,", "mipmaps LINEAR_MIPMAP_LINEAR = 0x2703 #: Texture wrap mode: Repeat REPEAT", "Texture: \"\"\"Create a 2D Texture. Wrap modes: ``GL_REPEAT``, ``GL_MIRRORED_REPEAT``, ``GL_CLAMP_TO_EDGE``,", "= 0x2601 #: Texture interpolation: Minification filter for mipmaps NEAREST_MIPMAP_NEAREST", "# (created, freed) self.texture = (0, 0) self.framebuffer = (0,", "#: Maximum size in basic machine units of a uniform", "Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_LINEAR = 0x2703 #:", "a color multisample texture self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the number", "the garbage collection mode for OpenGL resources. Supported modes are:", "= ( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if tess_control_shader else None ) source_te", "ContextStats: def __init__(self, warn_threshold=100): self.warn_threshold = warn_threshold # (created, freed)", "= gl.GL_TRIANGLE_FAN # 6 #: Primitive mode LINES_ADJACENCY = gl.GL_LINES_ADJACENCY", "blocks per vertex shader. 
self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET =", "add new ones so we need while len(self.objects): obj =", "boolean values that can be #: held in uniform variable", "= self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #: Maximum number of 4-vectors that", "the current context self.MINOR_VERSION = self.get(gl.GL_MINOR_VERSION) #: Major version number", "version \"\"\" return self._gl_version def gc(self): \"\"\" Run garbage collection", "% self.warn_threshold == 0 and created > 0: LOG.debug( \"%s", "4: RGBA) :param str dtype: The data type of each", "if err: from warnings import warn warn(\"Error happened while querying", "blending is enabled ctx.enable_only(ctx.BLEND) # Make sure only depth test", "(optional) :param str tess_evaluation_shader: tessellation evaluation shader source (optional) :param", "wrap_x: How the texture wraps in x direction :param GLenum", "= self._screen self.stats: ContextStats = ContextStats(warn_threshold=1000) # Hardcoded states #", "source_fs: if source_geo: out_attributes = source_geo.out_attributes else: out_attributes = source_vs.out_attributes", "texture wraps in y direction :param Tuple[GLenum,GLenum] filter: Minification and", ":param str dtype: The data type of each component: f1,", "destination - source FUNC_REVERSE_SUBTRACT = 0x800B #: Blend equations: Minimum", "#: Value indicating the number of sample buffers associated with", "Auto ctx.gc_mode = \"auto\" \"\"\" return self._gc_mode @gc_mode.setter def gc_mode(self,", "0x2700 #: Texture interpolation: Minification filter for mipmaps LINEAR_MIPMAP_NEAREST =", "int = 0, usage: str = \"static\" ) -> Buffer:", "source (optional) :param dict defines: Substitute #defines values in the", "= (0, 0) self.framebuffer = (0, 0) self.buffer = (0,", "self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO: Missing in", "#: 
Blend function ZERO = 0x0000 #: Blend function ONE", "self.active_framebuffer.viewport = value @property def blend_func(self) -> Tuple[int, int]: \"\"\"", ":rtype: :py:class:`~arcade.gl.Program` \"\"\" source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs = (", "key): created, freed = getattr(self, key) setattr(self, key, (created, freed", "that the GL can handle. The value must be at", "Maximum number of uniform blocks per geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS =", "int]: \"\"\" The OpenGL version as a 2 component tuple", "ctx def enable(self, *args): \"\"\" Enables one or more context", "border color CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER # Texture wrap mode: Repeat", "int, int]): self.active_framebuffer.viewport = value @property def blend_func(self) -> Tuple[int,", "allocations passed threshold (%s) [created = %s] [freed = %s]", "\"\"\"Create a 2D depth texture :param Tuple[int, int] size: The", "units self.default_texture_unit = self.limits.MAX_TEXTURE_IMAGE_UNITS - 1 # Detect the default", "this context belongs to. 
:type: ``pyglet.Window`` \"\"\" return self._window_ref() @property", ":param Any data: The buffer data, This can be ``bytes``", "int], *, data=None) -> Texture: \"\"\"Create a 2D depth texture", "== 0 and created > 0: LOG.debug( \"%s allocations passed", "when gc_mode is \"context_gc\" self.objects = deque() @property def window(self)", "rasterized geometry in window coordinates self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS) #: A", "self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended maximum number of vertex array", "Maximum number of texels allowed in the texel array of", "0x2600 #: Texture interpolation: Linear interpolate LINEAR = 0x2601 #:", "number of samples in a color multisample texture self.MAX_COLOR_TEXTURE_SAMPLES =", ") #: Maximum number of samples supported in integer format", "int]): self._blend_func = value gl.glBlendFunc(value[0], value[1]) # def blend_equation(self) #", "0, 0, *ctx.screen.size :type: tuple (x, y, width, height) \"\"\"", "Substitute #defines values in the source (optional) :rtype: :py:class:`~arcade.gl.Program` \"\"\"", "``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR`` :param Tuple[int, int] size:", "def gc(self): \"\"\" Run garbage collection of OpenGL objects for", "control shader source (optional) :param str tess_evaluation_shader: tessellation evaluation shader", "else None ) source_tc = ( ShaderSource(tess_control_shader, gl.GL_TESS_CONTROL_SHADER) if tess_control_shader", "fragment_shader: fragment shader source (optional) :param str geometry_shader: geometry shader", "vertex shader uniform variables in all uniform blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS =", "ctx.enable(ctx.BLEND) # Multiple flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags.update(args) for flag", "cast, c_float from collections import deque import logging import weakref", "Program: \"\"\"Create a :py:class:`~arcade.gl.Program` given the 
vertex, fragment and geometry", "4-component generic vertex attributes accessible to a vertex shader. self.MAX_VERTEX_ATTRIBS", "Get the currently active framebuffer. This property is read-only :type:", "self, *, vertex_shader: str, fragment_shader: str = None, geometry_shader: str", "from .glsl import ShaderSource from .types import BufferDescription LOG =", "the largest texture that the GL can handle self.MAX_TEXTURE_SIZE =", "``GL_LINEAR`` :param Tuple[int, int] size: The size of the texture", "states # This should always be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set", "geometry shader source (optional) :param str tess_control_shader: tessellation control shader", "ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags -= set(args) for flag in args:", "dict defines: Substitute #defines values in the source (optional) :rtype:", "set the point size.\"\"\" return self._point_size @point_size.setter def point_size(self, value:", "(optional) :param Buffer index_buffer: Index/element buffer (optional) :param int mode:", "not source_fs: if source_geo: out_attributes = source_geo.out_attributes else: out_attributes =", "Major version number of the OpenGL API supported by the", "--- Resource methods --- def buffer( self, *, data: Optional[Any]", "type of each component: f1, f2, f4 / i1, i2,", "of source and destination MIN = 0x8007 #: Blend equations:", "{ gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY:", "6 #: Primitive mode LINES_ADJACENCY = gl.GL_LINES_ADJACENCY # 10 #:", "fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS ) #: maximum number", "int = 4, dtype: str = \"f1\", data: Any =", "in integer format multisample buffers self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum", "#: Maximum 
number of 4-vectors that may be held in", "str = \"static\" ) -> Buffer: \"\"\"Create a new OpenGL", "self.BLEND in self._flags: gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND) if self.DEPTH_TEST in self._flags:", "Context class contains methods for creating resources, global states and", "(what we expect in python) self._gc_mode = \"auto\" self.gc_mode =", "(src, dst) \"\"\" return self._blend_func @blend_func.setter def blend_func(self, value: Tuple[int,", "Deleting one object might add new ones so we need", "supported size for renderbuffers self.MAX_RENDERBUFFER_SIZE = self.get(gl.GL_MAX_RENDERBUFFER_SIZE) #: Maximum number", "held in uniform variable storage for a fragment shader self.MAX_FRAGMENT_UNIFORM_COMPONENTS", "Primitive mode LINE_STRIP = gl.GL_LINE_STRIP # 3 #: Primitive mode", "Minification filter for mipmaps NEAREST_MIPMAP_NEAREST = 0x2700 #: Texture interpolation:", "Any data: The texture data (optional). Can be bytes or", "to access texture maps from the vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS =", "Minification filter for mipmaps LINEAR_MIPMAP_LINEAR = 0x2703 #: Texture wrap", "warn_threshold=100): self.warn_threshold = warn_threshold # (created, freed) self.texture = (0,", "self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS)", "used to make up a single patch primitive. Patch primitives", "buffer object self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum number of uniform", "if self.DEPTH_TEST in self._flags: gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE in", "for OpenGL resources. Supported modes are: # default: Auto ctx.gc_mode", "def incr(self, key): created, freed = getattr(self, key) setattr(self, key,", "Enable only some flags. 
This will disable all other flags.", "#: Maximum number of uniform buffer binding points on the", "= self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO: Missing in pyglet # self.MAX_TEXTURE_MAX_ANISOTROPY =", "value) return value.value @patch_vertices.setter def patch_vertices(self, value: int): if not", "= value gl.glBlendFunc(value[0], value[1]) # def blend_equation(self) # def front_face(self)", "or ``arcade.gl.BLEND``). \"\"\" #: The active context active: Optional[\"Context\"] =", "number of uniform blocks per fragment shader. self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS)", "values that can be #: held in uniform variable storage", "unit we use when doing operations on textures to avoid", "viewport(self, value: Tuple[int, int, int, int]): self.active_framebuffer.viewport = value @property", "self.buffer = (0, 0) self.program = (0, 0) self.vertex_array =", "instance. :param list content: List of :py:class:`~arcade.gl.BufferDescription` (optional) :param Buffer", "-> Tuple[int, int, int, int]: \"\"\" Get or set the", "class Limits: \"\"\"OpenGL Limitations\"\"\" def __init__(self, ctx): self._ctx = ctx", "#: Maximum number of uniform blocks per fragment shader. 
self.MAX_FRAGMENT_UNIFORM_BLOCKS", "key, (created, freed + 1)) class Limits: \"\"\"OpenGL Limitations\"\"\" def", "gl.GL_BLEND #: Context flag: Depth testing DEPTH_TEST = gl.GL_DEPTH_TEST #:", "a framebuffer self.MAX_COLOR_ATTACHMENTS = self.get(gl.GL_MAX_COLOR_ATTACHMENTS) #: Maximum number of samples", "version as a 2 component tuple :type: tuple (major, minor)", "maps from the geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS )", "= gl.GL_POINTS # 0 #: Primitive mode LINES = gl.GL_LINES", "self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS ) #: Maximum number of uniform", "= self.get( gl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS ) #: Maximum number of individual floating-point,", "(tessellation) PATCHES = gl.GL_PATCHES # The most common error enums", "and destination MIN = 0x8007 #: Blend equations: Maximum of", "operations on textures to avoid # affecting currently bound textures", "present) and subsequently used for tessellation. :type: int \"\"\" value", "geometry_shader: str = None, tess_control_shader: str = None, tess_evaluation_shader: str", "shader self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum number of uniform blocks", "variable storage for a vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS", "import pyglet from pyglet.window import Window from pyglet import gl", "gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", } def __init__(self, window: pyglet.window.Window, gc_mode: str =", "#: Maximum number of components of output written by a", "= gl.GL_CULL_FACE #: Context flag: Enable ``gl_PointSize`` in shaders. 
PROGRAM_POINT_SIZE", "uniform block self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The number 4-vectors for", "enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags = set(args) if self.BLEND in", "of sample mask words self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum number", "self._flags: Set[int] = set() # Normal garbage collection as default", "= ctx #: Minor version number of the OpenGL API", "= self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #: Maximum supported texture image units", "gc_mode(self) -> str: \"\"\" Set the garbage collection mode for", "in basic machine units of a uniform block self.MAX_UNIFORM_BLOCK_SIZE =", "mode TRIANGLE_FAN = gl.GL_TRIANGLE_FAN # 6 #: Primitive mode LINES_ADJACENCY", "Primitive mode LINES = gl.GL_LINES # 1 #: Primitive mode", "content: List of :py:class:`~arcade.gl.BufferDescription` (optional) :param Buffer index_buffer: Index/element buffer", ":param str geometry_shader: geometry shader source (optional) :param str tess_control_shader:", "shader. self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum number of components of", "= self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum number of texels allowed in the", "default draw mode (optional) :param int index_element_size: Byte size of", "array indices self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended maximum number of", "#: An estimate of the number of bits of subpixel", "size in basic machine units of a uniform block self.MAX_UNIFORM_BLOCK_SIZE", "GL can handle. The value must be at least 64", "int): if not isinstance(value, int): raise TypeError(\"patch_vertices must be an", "self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the number of words for fragment shader uniform", "Create a Geomtry instance. 
:param list content: List of :py:class:`~arcade.gl.BufferDescription`", "value gl.glPrimitiveRestartIndex(value) def finish(self) -> None: \"\"\"Wait until all OpenGL", "needed when ``gc_mode`` is ``context_gc``. \"\"\" # Loop the array", "PROGRAM_POINT_SIZE = gl.GL_PROGRAM_POINT_SIZE # Blend functions #: Blend function ZERO", "texture maps from the geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS = self.get( gl.GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS", "str = None, geometry_shader: str = None, tess_control_shader: str =", "gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION: \"GL_INVALID_OPERATION\", gl.GL_INVALID_FRAMEBUFFER_OPERATION: \"GL_INVALID_FRAMEBUFFER_OPERATION\", gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\",", "if source_te else None, out_attributes=out_attributes, ) def query(self): \"\"\" Create", "number of uniform blocks per geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS)", "#: Maximum samples for a framebuffer self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES) #:", "ONE_MINUS_DST_COLOR = 0x0307 # Blend equations #: source + destination", "primitive restart index. Default is -1\"\"\" return self._primitive_restart_index @primitive_restart_index.setter def", "passed threshold (%s) [created = %s] [freed = %s] [active", "#: Maximum number of sample mask words self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS)", "ctypes import c_int, c_char_p, cast, c_float from collections import deque", "self._flags: gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE in self._flags: gl.glEnable(self.CULL_FACE) else:", "- source FUNC_REVERSE_SUBTRACT = 0x800B #: Blend equations: Minimum of", "garbage collection as default (what we expect in python) self._gc_mode", "self.get(gl.GL_MAX_VARYING_VECTORS) #: Maximum number of 4-component generic vertex attributes accessible", "only some flags. 
This will disable all other flags. This", "The default draw mode (optional) :param int index_element_size: Byte size", "#: Maximum number of 4-component generic vertex attributes accessible to", "primitives are consumed by the tessellation control shader (if present)", "in modes: raise ValueError(\"Unsupported gc_mode. Supported modes are:\", modes) self._gc_mode", "to border color CLAMP_TO_BORDER = gl.GL_CLAMP_TO_BORDER # Texture wrap mode:", "error enums _errors = { gl.GL_INVALID_ENUM: \"GL_INVALID_ENUM\", gl.GL_INVALID_VALUE: \"GL_INVALID_VALUE\", gl.GL_INVALID_OPERATION:", "dtype: The data type of each component: f1, f2, f4", "Set[int] = set() # Normal garbage collection as default (what", "Disable one or more context flags:: # Single flag ctx.disable(ctx.BLEND)", "use when doing operations on textures to avoid # affecting", "per geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #: Maximum number of", "import Query from .glsl import ShaderSource from .types import BufferDescription", "None: \"\"\"Wait until all OpenGL rendering commands are completed\"\"\" gl.glFinish()", "geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS = self.get( gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #: Maximum supported", "shader source (optional) :param str geometry_shader: geometry shader source (optional)", "in all uniform blocks self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #:", "self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended maximum number of vertex array indices self.MAX_ELEMENTS_INDICES", "gl.GL_MAX_VERTEX_UNIFORM_COMPONENTS ) #: Maximum number of 4-vectors that may be", "- destination FUNC_SUBTRACT = 0x800A #: Blend equations: destination -", "self._point_size = 1.0 self._flags: Set[int] = set() # Normal garbage", "self.gc_mode = gc_mode #: Collected objects to gc when gc_mode", "gl.GL_MAX_GEOMETRY_OUTPUT_COMPONENTS ) #: Maximum supported texture image units 
that can", "int): self._primitive_restart_index = value gl.glPrimitiveRestartIndex(value) def finish(self) -> None: \"\"\"Wait", "gl.GL_REPEAT # Texture wrap mode: Clamp to border pixel CLAMP_TO_EDGE", "limit\"\"\" value = c_int() gl.glGetIntegerv(enum, value) return value.value def get_float(self,", "that can be #: held in uniform variable storage for", "R, 2: RG, 3: RGB, 4: RGBA) :param str dtype:", "single patch primitive. Patch primitives are consumed by the tessellation", "created > 0: LOG.debug( \"%s allocations passed threshold (%s) [created", "for default blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT = 0x0302, 0x0303", "one or more context flags:: # Single flag ctx.disable(ctx.BLEND) #", "The size of the texture :param int components: Number of", "return Geometry(self, content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size) def program( self, *,", "Tracking active program self.active_program: Optional[Program] = None # Tracking active", "1 # Detect the default framebuffer self._screen = DefaultFrameBuffer(self) #", "Get or set number of vertices that will be used", ":rtype: :py:class:`~arcade.gl.Framebuffer` \"\"\" return Framebuffer( self, color_attachments=color_attachments, depth_attachment=depth_attachment ) def", "*ctx.screen.size :type: tuple (x, y, width, height) \"\"\" return self.active_framebuffer.viewport", "\"\"\"float: Get or set the point size.\"\"\" return self._point_size @point_size.setter", "GL can handle self.MAX_CUBE_MAP_TEXTURE_SIZE = self.get(gl.GL_MAX_CUBE_MAP_TEXTURE_SIZE) #: Maximum number of", "self.get(gl.GL_MINOR_VERSION) #: Major version number of the OpenGL API supported", "data (optional). Can be bytes or an object supporting the", "Create a query object for measuring rendering calls in opengl.", "\"\"\" Set the garbage collection mode for OpenGL resources. Supported", "should render to. 
Normally it would be the size of", "estimate of the largest texture that the GL can handle", "number of sample mask words self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum", "of components (1: R, 2: RG, 3: RGB, 4: RGBA)", "framebuffer. The viewport simply describes what pixels of the screen", "flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags -= set(args) for flag in", "return self._gl_version def gc(self): \"\"\" Run garbage collection of OpenGL", "Tuple[int, int] size: The size of the texture :param Any", "depth_texture(self, size: Tuple[int, int], *, data=None) -> Texture: \"\"\"Create a", "gl.GL_OUT_OF_MEMORY: \"GL_OUT_OF_MEMORY\", gl.GL_STACK_UNDERFLOW: \"GL_STACK_UNDERFLOW\", gl.GL_STACK_OVERFLOW: \"GL_STACK_OVERFLOW\", } def __init__(self, window:", "gl.GL_LINE_STRIP # 3 #: Primitive mode TRIANGLES = gl.GL_TRIANGLES #", "Limitations\"\"\" def __init__(self, ctx): self._ctx = ctx #: Minor version", "Geometry, VertexArray from .framebuffer import Framebuffer, DefaultFrameBuffer from typing import", "``GL_NEAREST_MIPMAP_NEAREST``, ``GL_LINEAR_MIPMAP_NEAREST`` ``GL_NEAREST_MIPMAP_LINEAR``, ``GL_LINEAR_MIPMAP_LINEAR`` Magnifying filters: ``GL_NEAREST``, ``GL_LINEAR`` :param Tuple[int,", "of uniform blocks per geometry shader self.MAX_GEOMETRY_UNIFORM_BLOCKS = self.get(gl.GL_MAX_GEOMETRY_UNIFORM_BLOCKS) #:", "= 0x0307 # Blend equations #: source + destination FUNC_ADD", "Blend mode shortcut for default blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT", "FUNC_REVERSE_SUBTRACT = 0x800B #: Blend equations: Minimum of source and", "wrap_y=wrap_y, filter=filter, ) def depth_texture(self, size: Tuple[int, int], *, data=None)", "self.get(gl.GL_MAX_DEPTH_TEXTURE_SAMPLES) #: Maximum number of simultaneous outputs that may be", "self.CONTEXT_PROFILE_MASK = self.get(gl.GL_CONTEXT_PROFILE_MASK) #: Minimum required alignment for uniform buffer", "or the blend function:: ctx.blend_func = ctx.ONE, ctx.ONE :type: tuple", 
"indices self.MAX_ELEMENTS_INDICES = self.get(gl.GL_MAX_ELEMENTS_INDICES) #: Recommended maximum number of vertex", "alpha: ``SRC_ALPHA, ONE`` BLEND_PREMULTIPLIED_ALPHA = 0x0302, 0x0001 # VertexArray: Primitives", "= getattr(self, key) setattr(self, key, (created, freed + 1)) class", "\"\"\" value = c_int() gl.glGetIntegerv(gl.GL_PATCH_VERTICES, value) return value.value @patch_vertices.setter def", "Maximum number of components of the inputs read by the", "texture buffer object self.MAX_TEXTURE_BUFFER_SIZE = self.get(gl.GL_MAX_TEXTURE_BUFFER_SIZE) #: Maximum number of", "int]: \"\"\" Get or the blend function:: ctx.blend_func = ctx.ONE,", "@property def window(self) -> Window: \"\"\" The window this context", "LINEAR_MIPMAP_NEAREST = 0x2701 #: Texture interpolation: Minification filter for mipmaps", "viewport # to avoid background color affecting areas outside the", "and created > 0: LOG.debug( \"%s allocations passed threshold (%s)", "or an object supporting the buffer protocol. \"\"\" return Texture(self,", "Value indicating the number of sample buffers associated with the", "#: held in uniform variable storage for a fragment shader", "no errors has occurred. Example:: err = ctx.error if err:", "``window.ctx``. The Context class contains methods for creating resources, global", "components: int = 4, dtype: str = \"f1\", data: Any", "of words for geometry shader uniform variables in all uniform", "units of a uniform block self.MAX_UNIFORM_BLOCK_SIZE = self.get(gl.GL_MAX_UNIFORM_BLOCK_SIZE) #: The", "integer, or boolean values that can be #: held in", "Maximum number of individual floating-point, integer, or boolean values that", "class Context: \"\"\" Represents an OpenGL context. This context belongs", "framebuffer( self, *, color_attachments: Union[Texture, List[Texture]] = None, depth_attachment: Texture", "# def cull_face(self) @property def patch_vertices(self) -> int: \"\"\" Get", "the texture :param Any data: The texture data (optional). 
Can", "the GL can handle. The value must be at least", "in args: gl.glDisable(flag) def is_enabled(self, flag) -> bool: \"\"\" Check", "def enable_only(self, *args): \"\"\" Enable only some flags. This will", "the size of the window's framebuffer:: # 4:3 screen ctx.viewport", "blocks per fragment shader. self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum number", "per fragment shader. self.MAX_FRAGMENT_UNIFORM_BLOCKS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum number of", "# 3 #: Primitive mode TRIANGLES = gl.GL_TRIANGLES # 4", "= getattr(self, key) setattr(self, key, (created + 1, freed)) if", "default: Auto ctx.gc_mode = \"auto\" \"\"\" return self._gc_mode @gc_mode.setter def", "a geometry shader is present the out attributes will be", ") #: Maximum number of uniform blocks per program self.MAX_COMBINED_UNIFORM_BLOCKS", "of texels allowed in the texel array of a texture", "ctx.CULL_FACE) \"\"\" self._flags -= set(args) for flag in args: gl.glDisable(flag)", "gl.GL_LINES_ADJACENCY # 10 #: Primitive mode LINE_STRIP_ADJACENCY = gl.GL_LINE_STRIP_ADJACENCY #", "= self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended maximum number of vertex array indices", "16 or 32 bit unsigned integer) \"\"\" return Geometry(self, content,", "modes) self._gc_mode = value @property def error(self) -> Union[str, None]:", "in self._flags: gl.glEnable(self.BLEND) else: gl.glDisable(self.BLEND) if self.DEPTH_TEST in self._flags: gl.glEnable(self.DEPTH_TEST)", "else: gl.glDisable(self.PROGRAM_POINT_SIZE) def disable(self, *args): \"\"\" Disable one or more", "subpixel resolution #: that are used to position rasterized geometry", "vertex array vertices self.MAX_ELEMENTS_VERTICES = self.get(gl.GL_MAX_ELEMENTS_VERTICES) #: Maximum number of", "and commonly used enums. 
All enums also exist in the", "uniform variable storage for a vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS = self.get(", "\"\"\"Wait until all OpenGL rendering commands are completed\"\"\" gl.glFinish() #", "0x0303 #: Blend mode shortcut for additive blending: ``ONE, ONE``", "for a fragment shader self.MAX_FRAGMENT_UNIFORM_VECTORS = self.get(gl.GL_MAX_FRAGMENT_UNIFORM_VECTORS) #: Maximum number", "default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index = -1 self.primitive_restart_index = self._primitive_restart_index # We", "= 0x0302, 0x0001 # VertexArray: Primitives #: Primitive mode POINTS", "uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS ) #: Maximum supported", "texture, and must be at least 256 self.MAX_ARRAY_TEXTURE_LAYERS = self.get(gl.GL_MAX_ARRAY_TEXTURE_LAYERS)", "self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum number of texels allowed in the texel", "i1, i2, i4 / u1, u2, u4 :param Any data:", "value: str): modes = [\"auto\", \"context_gc\"] if value not in", "@patch_vertices.setter def patch_vertices(self, value: int): if not isinstance(value, int): raise", "destination MIN = 0x8007 #: Blend equations: Maximum of source", "= None ) -> Texture: \"\"\"Create a 2D Texture. 
Wrap", "int: \"\"\" Get or set number of vertices that will", ") #: Maximum number of 4-vectors that may be held", "GLenum wrap_y: How the texture wraps in y direction :param", "value indicating what context profile is used (core, compat etc.)", "self.DEPTH_TEST in self._flags: gl.glEnable(self.DEPTH_TEST) else: gl.glDisable(self.DEPTH_TEST) if self.CULL_FACE in self._flags:", "can be ``bytes`` or an object supporting the buffer protocol.", "None, depth_attachment: Texture = None ) -> Framebuffer: \"\"\"Create a", "or more context flags:: # Single flag ctx.enable(ctx.BLEND) # Multiple", "4 (8, 16 or 32 bit unsigned integer) \"\"\" return", "values that can #: be held in uniform variable storage", "(optional) :rtype: :py:class:`~arcade.gl.Program` \"\"\" source_vs = ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs =", "rough estimate of the largest texture that the GL can", "BufferDescription LOG = logging.getLogger(__name__) class Context: \"\"\" Represents an OpenGL", "blend_equation(self) # def front_face(self) # def cull_face(self) @property def patch_vertices(self)", "#: The active context active: Optional[\"Context\"] = None # ---", "value) return value.value def get_str(self, enum: gl.GLenum) -> str: \"\"\"Get", "bit unsigned integer) \"\"\" return Geometry(self, content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size)", "self.get( gl.GL_MAX_GEOMETRY_INPUT_COMPONENTS ) #: Maximum number of components of outputs", "function SRC_ALPHA = 0x0302 #: Blend function ONE_MINUS_SRC_ALPHA = 0x0303", "0) self.geometry = (0, 0) def incr(self, key): created, freed", "Primitive mode TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP # 5 #: Primitive mode", "self._primitive_restart_index @primitive_restart_index.setter def primitive_restart_index(self, value: int): self._primitive_restart_index = value gl.glPrimitiveRestartIndex(value)", "textures we want to render into :param arcade.gl.Texture depth_attachment: Depth", "0x0305 #: Blend function 
DST_COLOR = 0x0306 #: Blend function", "# Blend mode shortcuts #: Blend mode shortcut for default", "Tracking active framebuffer. On context creation the window is the", "gl.GL_TRIANGLES # 4 #: Primitive mode TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP #", "in all uniform blocks self.MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS ) #:", "uniform variables in all uniform blocks self.MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS", "class contains methods for creating resources, global states and commonly", "= gl.GL_TRIANGLES # 4 #: Primitive mode TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP", "int = 4, ): \"\"\" Create a Geomtry instance. :param", "be held in uniform variable storage for a vertex shader", "Patch mode (tessellation) PATCHES = gl.GL_PATCHES # The most common", "components of the inputs read by the fragment shader self.MAX_FRAGMENT_INPUT_COMPONENTS", "Minification and magnification filter \"\"\" return Texture( self, size, components=components,", "Face culling CULL_FACE = gl.GL_CULL_FACE #: Context flag: Enable ``gl_PointSize``", "bool: \"\"\" Check if a context flag is enabled :type:", "Loop the array until all objects are gone. # Deleting", "-1 by default gl.glEnable(gl.GL_PRIMITIVE_RESTART) self._primitive_restart_index = -1 self.primitive_restart_index = self._primitive_restart_index", "= gl.glGetError() if err == gl.GL_NO_ERROR: return None return self._errors.get(err,", "Window: \"\"\" The window this context belongs to. 
:type: ``pyglet.Window``", "warn_threshold # (created, freed) self.texture = (0, 0) self.framebuffer =", "VertexArray: Primitives #: Primitive mode POINTS = gl.GL_POINTS # 0", "in uniform variable storage for the vertex shader self.MAX_VERTEX_UNIFORM_VECTORS =", "#: The value gives a rough estimate of the largest", "DEPTH_TEST = gl.GL_DEPTH_TEST #: Context flag: Face culling CULL_FACE =", "TRIANGLE_STRIP = gl.GL_TRIANGLE_STRIP # 5 #: Primitive mode TRIANGLE_FAN =", "direction :param GLenum wrap_y: How the texture wraps in y", "the texture wraps in x direction :param GLenum wrap_y: How", "in a color multisample texture self.MAX_COLOR_TEXTURE_SAMPLES = self.get(gl.GL_MAX_COLOR_TEXTURE_SAMPLES) #: the", "the default framebuffer self._screen = DefaultFrameBuffer(self) # Tracking active program", "\"\"\" #: The active context active: Optional[\"Context\"] = None #", "Represents an OpenGL context. This context belongs to a ``pyglet.Window``", "of the largest rectangular texture that the GL can handle", "are not lingering from other sections of your code base::", "minor) version \"\"\" return self._gl_version def gc(self): \"\"\" Run garbage", "Primitive mode LINES_ADJACENCY = gl.GL_LINES_ADJACENCY # 10 #: Primitive mode", "# type: List[str] if not source_fs: if source_geo: out_attributes =", "GL can handle self.MAX_TEXTURE_SIZE = self.get(gl.GL_MAX_TEXTURE_SIZE) #: Maximum number of", "Blend mode shortcuts #: Blend mode shortcut for default blend", "gl.GL_POINTS # 0 #: Primitive mode LINES = gl.GL_LINES #", "# Multiple flags ctx.enable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags.update(args) for flag in", "vertex shader. 
self.MAX_VERTEX_ATTRIBS = self.get(gl.GL_MAX_VERTEX_ATTRIBS) #: Maximum supported texture image", "import Any, Dict, List, Tuple, Union, Sequence, Set import pyglet", "context active: Optional[\"Context\"] = None # --- Store the most", "buffer binding points on the context self.MAX_UNIFORM_BUFFER_BINDINGS = self.get(gl.GL_MAX_UNIFORM_BUFFER_BINDINGS) #:", "will be located there out_attributes = [] # type: List[str]", "0x800B #: Blend equations: Minimum of source and destination MIN", "shader self.MAX_FRAGMENT_INPUT_COMPONENTS = self.get( gl.GL_MAX_FRAGMENT_INPUT_COMPONENTS ) #: Maximum number of", "#: Collected objects to gc when gc_mode is \"context_gc\" self.objects", "source and destination MIN = 0x8007 #: Blend equations: Maximum", "mode (tessellation) PATCHES = gl.GL_PATCHES # The most common error", "be held in uniform variable storage for a fragment shader", "# self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS =", "= None ) -> Framebuffer: \"\"\"Create a Framebuffer. :param List[arcade.gl.Texture]", "Single flag ctx.disable(ctx.BLEND) # Multiple flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags", "in the ``gl`` module. (``ctx.BLEND`` or ``arcade.gl.BLEND``). 
\"\"\" #: The", "(8, 16 or 32 bit unsigned integer) \"\"\" return Geometry(self,", "words self.MAX_SAMPLE_MASK_WORDS = self.get(gl.GL_MAX_SAMPLE_MASK_WORDS) #: Maximum number of texels allowed", "tuple (major, minor) version \"\"\" return self._gl_version def gc(self): \"\"\"", "def point_size(self, value: float): gl.glPointSize(self._point_size) self._point_size = value @property def", "ShaderSource(vertex_shader, gl.GL_VERTEX_SHADER) source_fs = ( ShaderSource(fragment_shader, gl.GL_FRAGMENT_SHADER) if fragment_shader else", "to access texture maps from the geometry shader self.MAX_GEOMETRY_TEXTURE_IMAGE_UNITS =", "to a ``pyglet.Window`` normally accessed through ``window.ctx``. The Context class", "function ONE_MINUS_SRC_ALPHA = 0x0303 #: Blend function DST_ALPHA = 0x0304", "number of individual 4-vectors of floating-point, integer, #: or boolean", ":param str tess_evaluation_shader: tessellation evaluation shader source (optional) :param dict", "constants # Texture #: Texture interpolation: Nearest pixel NEAREST =", "source_geo.out_attributes else: out_attributes = source_vs.out_attributes return Program( self, vertex_shader=source_vs.get_source(defines=defines), fragment_shader=source_fs.get_source(defines=defines)", "of the number of bits of subpixel resolution #: that", "Buffer usage. 'static', 'dynamic' or 'stream' :rtype: :py:class:`~arcade.gl.Buffer` \"\"\" #", "(``ctx.BLEND`` or ``arcade.gl.BLEND``). 
\"\"\" #: The active context active: Optional[\"Context\"]", "self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES) #: Maximum samples for a framebuffer self.MAX_SAMPLES", "to the same value as the viewport # to avoid", "return Buffer(self, data, reserve=reserve, usage=usage) def framebuffer( self, *, color_attachments:", "used to access texture maps from the vertex shader self.MAX_COMBINED_TEXTURE_IMAGE_UNITS", "self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS = self.get(gl.GL_MAX_TEXTURE_IMAGE_UNITS) # TODO:", "words for vertex shader uniform variables in all uniform blocks", "\"\"\" # create_with_size return Buffer(self, data, reserve=reserve, usage=usage) def framebuffer(", "render target self.active_framebuffer: Framebuffer = self._screen self.stats: ContextStats = ContextStats(warn_threshold=1000)", ") # If we don't have a fragment shader we", "def enable(self, *args): \"\"\" Enables one or more context flags::", "-> int: \"\"\"Get or set the primitive restart index. Default", "a :py:class:`~arcade.gl.Program` given the vertex, fragment and geometry shader. :param", "viewport for the currently active framebuffer. The viewport simply describes", "the texture :param int components: Number of components (1: R,", "patch primitive. 
Patch primitives are consumed by the tessellation control", "Buffer(self, data, reserve=reserve, usage=usage) def framebuffer( self, *, color_attachments: Union[Texture,", "#: Blend function SRC_ALPHA = 0x0302 #: Blend function ONE_MINUS_SRC_ALPHA", "gl.glEnable(self.CULL_FACE) else: gl.glDisable(self.CULL_FACE) if self.PROGRAM_POINT_SIZE in self._flags: gl.glEnable(self.PROGRAM_POINT_SIZE) else: gl.glDisable(self.PROGRAM_POINT_SIZE)", "blend mode: ``SRC_ALPHA, ONE_MINUS_SRC_ALPHA`` BLEND_DEFAULT = 0x0302, 0x0303 #: Blend", "Primitives #: Primitive mode POINTS = gl.GL_POINTS # 0 #:", "values that can be held in uniform variable storage for", "tessellation control shader (if present) and subsequently used for tessellation.", "\"\"\" return self._screen @property def fbo(self) -> Framebuffer: \"\"\" Get", "self.get(gl.GL_MAX_FRAGMENT_UNIFORM_BLOCKS) #: Maximum number of components of inputs read by", "import c_int, c_char_p, cast, c_float from collections import deque import", "color_attachments: Union[Texture, List[Texture]] = None, depth_attachment: Texture = None )", "shader source (optional) :param str tess_evaluation_shader: tessellation evaluation shader source", "window coordinates self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS) #: A mask value indicating", "self.MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = self.get( gl.GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS ) #: Number of words for", "# This should always be enabled gl.glEnable(gl.GL_TEXTURE_CUBE_MAP_SEAMLESS) # Set primitive", "# def front_face(self) # def cull_face(self) @property def patch_vertices(self) ->", "held in uniform variable storage for a vertex shader self.MAX_VERTEX_UNIFORM_COMPONENTS", "args: gl.glDisable(flag) def is_enabled(self, flag) -> bool: \"\"\" Check if", "int = None, index_element_size: int = 4, ): \"\"\" Create", "functions #: Blend function ZERO = 0x0000 #: Blend function", "of 4-vectors that may be held in uniform variable storage", "str, fragment_shader: str = None, 
geometry_shader: str = None, tess_control_shader:", "ensure that context flag states are not lingering from other", "supported in integer format multisample buffers self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES) #:", "Maximum samples for a framebuffer self.MAX_SAMPLES = self.get(gl.GL_MAX_SAMPLES) #: A", "Multiple flags ctx.disable(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags -= set(args) for flag", "of components of outputs written by a geometry shader self.MAX_GEOMETRY_OUTPUT_COMPONENTS", "Blend equations: Minimum of source and destination MIN = 0x8007", "each component: f1, f2, f4 / i1, i2, i4 /", "the number of bits of subpixel resolution #: that are", "= self.BLEND_DEFAULT self._point_size = 1.0 self._flags: Set[int] = set() #", "culling is enabled ctx.enable_only(ctx.DEPTH_TEST, ctx.CULL_FACE) \"\"\" self._flags = set(args) if", "floating-point, integer, or boolean values that can be #: held", "(0, 0) self.program = (0, 0) self.vertex_array = (0, 0)", "the OpenGL API supported by the current context. self.MAJOR_VERSION =", "= %s]\", key, self.warn_threshold, created, freed, created - freed, )", "to. 
:type: ``pyglet.Window`` \"\"\" return self._window_ref() @property def screen(self) ->", "1, 2 or 4 (8, 16 or 32 bit unsigned", "self.MAX_VERTEX_UNIFORM_BLOCKS = self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS =", "= self.objects.pop() obj.delete() @property def gc_mode(self) -> str: \"\"\" Set", "samples supported in integer format multisample buffers self.MAX_INTEGER_SAMPLES = self.get(gl.GL_MAX_INTEGER_SAMPLES)", "= None, wrap_y: gl.GLenum = None, filter: Tuple[gl.GLenum, gl.GLenum] =", "self.get(gl.GL_MAX_VERTEX_UNIFORM_BLOCKS) # self.MAX_VERTEX_ATTRIB_RELATIVE_OFFSET = self.get(gl.GL_MAX_VERTEX_ATTRIB_RELATIVE_OFFSET) # self.MAX_VERTEX_ATTRIB_BINDINGS = self.get(gl.GL_MAX_VERTEX_ATTRIB_BINDINGS) self.MAX_TEXTURE_IMAGE_UNITS", "decr(self, key): created, freed = getattr(self, key) setattr(self, key, (created,", "@property def blend_func(self) -> Tuple[int, int]: \"\"\" Get or the", ".types import BufferDescription LOG = logging.getLogger(__name__) class Context: \"\"\" Represents", "of bits of subpixel resolution #: that are used to", "self.BLEND_DEFAULT self._point_size = 1.0 self._flags: Set[int] = set() # Normal", "integer) \"\"\" return Geometry(self, content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size) def program(", "Geometry(self, content, index_buffer=index_buffer, mode=mode, index_element_size=index_element_size) def program( self, *, vertex_shader:", "import Framebuffer, DefaultFrameBuffer from typing import Optional from .texture import", "obj = self.objects.pop() obj.delete() @property def gc_mode(self) -> str: \"\"\"", "#: that are used to position rasterized geometry in window", "= \"auto\" \"\"\" return self._gc_mode @gc_mode.setter def gc_mode(self, value: str):", "in window coordinates self.SUBPIXEL_BITS = self.get(gl.GL_SUBPIXEL_BITS) #: A mask value", "value[1]) # def blend_equation(self) # 
def front_face(self) # def cull_face(self)", "buffers when using dual-source blending self.MAX_DUAL_SOURCE_DRAW_BUFFERS = self.get(gl.GL_MAX_DUAL_SOURCE_DRAW_BUFFERS) #: Recommended", "context flags:: # Single flag ctx.disable(ctx.BLEND) # Multiple flags ctx.disable(ctx.DEPTH_TEST,", "to ensure that context flag states are not lingering from", "None, geometry_shader=source_geo.get_source(defines=defines) if source_geo else None, tess_control_shader=source_tc.get_source(defines=defines) if source_tc else", "source_te = ( ShaderSource(tess_evaluation_shader, gl.GL_TESS_EVALUATION_SHADER) if tess_evaluation_shader else None )", "shortcuts #: Blend mode shortcut for default blend mode: ``SRC_ALPHA,", "gl.GLenum = None, wrap_y: gl.GLenum = None, filter: Tuple[gl.GLenum, gl.GLenum]", "objects to gc when gc_mode is \"context_gc\" self.objects = deque()", "to border pixel CLAMP_TO_EDGE = gl.GL_CLAMP_TO_EDGE # Texture wrap mode:", "Texture: \"\"\"Create a 2D depth texture :param Tuple[int, int] size:", ":rtype: :py:class:`~arcade.gl.Query` \"\"\" return Query(self) class ContextStats: def __init__(self, warn_threshold=100):", "draw mode (optional) :param int mode: The default draw mode" ]
[ "British Columbia Licensed under the Apache License, Version 2.0 (the", "db.Column(db.Integer, nullable=True) number_of_students = db.Column(db.Integer, nullable=True) exam_method = db.Column(db.String(15), nullable=False)", "notes = db.Column(db.String(400), nullable=True) exam_received_date = db.Column(db.DateTime, nullable=True) session_number =", "booking_id = db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"), nullable=True) exam_type_id = db.Column(db.Integer,", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", "Base from qsystem import db class Exam(Base): exam_id = db.Column(db.Integer,", "= db.Column(db.String(50), nullable=False) examinee_name = db.Column(db.String(50), nullable=True) expiry_date = db.Column(db.DateTime,", "nullable=False) examinee_name = db.Column(db.String(50), nullable=True) expiry_date = db.Column(db.DateTime, nullable=True) notes", "permissions and limitations under the License.''' from app.models.bookings import Base", "software distributed under the License is distributed on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "Columbia Licensed under the Apache License, Version 2.0 (the \"License\");", "nullable=False) event_id = db.Column(db.String(25), nullable=False) exam_name = db.Column(db.String(50), nullable=False) examinee_name", "= db.Column(db.String(50), nullable=True) offsite_location = db.Column(db.String(50), nullable=True) booking = db.relationship(\"Booking\")", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "= db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False) booking_id = db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set", "db.Column(db.Integer, nullable=True) exam_method = db.Column(db.String(15), nullable=False) deleted_date = db.Column(db.String(50), nullable=True)", "Version 2.0 (the \"License\"); you may not use this file", "Province of British Columbia Licensed under the Apache License, Version", "writing, software distributed under the License is distributed on an", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "Apache License, Version 2.0 (the \"License\"); you may not use", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "express or implied. See the License for the specific language", "booking = db.relationship(\"Booking\") exam_type = db.relationship(\"ExamType\") office = db.relationship(\"Office\") def", "nullable=True) session_number = db.Column(db.Integer, nullable=True) number_of_students = db.Column(db.Integer, nullable=True) exam_method", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "in compliance with the License. You may obtain a copy", "db.Column(db.String(50), nullable=True) offsite_location = db.Column(db.String(50), nullable=True) booking = db.relationship(\"Booking\") exam_type", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "nullable=True) exam_received_date = db.Column(db.DateTime, nullable=True) session_number = db.Column(db.Integer, nullable=True) number_of_students", "you may not use this file except in compliance with", "db class Exam(Base): exam_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False) booking_id", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "the License. 
You may obtain a copy of the License", "agreed to in writing, software distributed under the License is", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "__repr__(self): return '<Exam Name: (name={self.exam_name!r})>'.format(self=self) def __init__(self, **kwargs): super(Exam, self).__init__(**kwargs)", "db.Column(db.Integer, nullable=False, default=0) exam_returned_tracking_number = db.Column(db.String(50), nullable=True) offsite_location = db.Column(db.String(50),", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "office = db.relationship(\"Office\") def __repr__(self): return '<Exam Name: (name={self.exam_name!r})>'.format(self=self) def", "db.Column(db.String(400), nullable=True) exam_received_date = db.Column(db.DateTime, nullable=True) session_number = db.Column(db.Integer, nullable=True)", "use this file except in compliance with the License. You", "qsystem import db class Exam(Base): exam_id = db.Column(db.Integer, primary_key=True, autoincrement=True,", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "specific language governing permissions and limitations under the License.''' from", "the specific language governing permissions and limitations under the License.'''", "ANY KIND, either express or implied. 
See the License for", "nullable=False) exam_name = db.Column(db.String(50), nullable=False) examinee_name = db.Column(db.String(50), nullable=True) expiry_date", "exam_returned_ind = db.Column(db.Integer, nullable=False, default=0) exam_returned_tracking_number = db.Column(db.String(50), nullable=True) offsite_location", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "db.Column(db.String(50), nullable=True) expiry_date = db.Column(db.DateTime, nullable=True) notes = db.Column(db.String(400), nullable=True)", "= db.Column(db.Integer, nullable=True) number_of_students = db.Column(db.Integer, nullable=True) exam_method = db.Column(db.String(15),", "nullable=True) number_of_students = db.Column(db.Integer, nullable=True) exam_method = db.Column(db.String(15), nullable=False) deleted_date", "exam_method = db.Column(db.String(15), nullable=False) deleted_date = db.Column(db.String(50), nullable=True) exam_returned_ind =", "exam_returned_tracking_number = db.Column(db.String(50), nullable=True) offsite_location = db.Column(db.String(50), nullable=True) booking =", "offsite_location = db.Column(db.String(50), nullable=True) booking = db.relationship(\"Booking\") exam_type = db.relationship(\"ExamType\")", "either express or implied. 
See the License for the specific", "db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"), nullable=True) exam_type_id = db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id", "limitations under the License.''' from app.models.bookings import Base from qsystem", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "db.Column(db.String(25), nullable=False) exam_name = db.Column(db.String(50), nullable=False) examinee_name = db.Column(db.String(50), nullable=True)", "under the License is distributed on an \"AS IS\" BASIS,", "\"License\"); you may not use this file except in compliance", "db.Column(db.String(15), nullable=False) deleted_date = db.Column(db.String(50), nullable=True) exam_returned_ind = db.Column(db.Integer, nullable=False,", "= db.relationship(\"Office\") def __repr__(self): return '<Exam Name: (name={self.exam_name!r})>'.format(self=self) def __init__(self,", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "db.Column(db.DateTime, nullable=True) notes = db.Column(db.String(400), nullable=True) exam_received_date = db.Column(db.DateTime, nullable=True)", "nullable=False, default=0) exam_returned_tracking_number = db.Column(db.String(50), nullable=True) offsite_location = db.Column(db.String(50), nullable=True)", "db.relationship(\"Booking\") exam_type = db.relationship(\"ExamType\") office = db.relationship(\"Office\") def __repr__(self): return", "with the License. 
You may obtain a copy of the", "= db.relationship(\"ExamType\") office = db.relationship(\"Office\") def __repr__(self): return '<Exam Name:", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "= db.Column(db.Integer, nullable=False, default=0) exam_returned_tracking_number = db.Column(db.String(50), nullable=True) offsite_location =", "db.Column(db.String(50), nullable=True) exam_returned_ind = db.Column(db.Integer, nullable=False, default=0) exam_returned_tracking_number = db.Column(db.String(50),", "License for the specific language governing permissions and limitations under", "exam_name = db.Column(db.String(50), nullable=False) examinee_name = db.Column(db.String(50), nullable=True) expiry_date =", "examinee_name = db.Column(db.String(50), nullable=True) expiry_date = db.Column(db.DateTime, nullable=True) notes =", "number_of_students = db.Column(db.Integer, nullable=True) exam_method = db.Column(db.String(15), nullable=False) deleted_date =", "from qsystem import db class Exam(Base): exam_id = db.Column(db.Integer, primary_key=True,", "db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False) event_id = db.Column(db.String(25), nullable=False) exam_name = db.Column(db.String(50),", "this file except in compliance with the License. 
You may", "= db.Column(db.String(50), nullable=True) expiry_date = db.Column(db.DateTime, nullable=True) notes = db.Column(db.String(400),", "the License.''' from app.models.bookings import Base from qsystem import db", "(the \"License\"); you may not use this file except in", "exam_received_date = db.Column(db.DateTime, nullable=True) session_number = db.Column(db.Integer, nullable=True) number_of_students =", "nullable=True) expiry_date = db.Column(db.DateTime, nullable=True) notes = db.Column(db.String(400), nullable=True) exam_received_date", "db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id = db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False) event_id =", "= db.Column(db.DateTime, nullable=True) notes = db.Column(db.String(400), nullable=True) exam_received_date = db.Column(db.DateTime,", "nullable=False) deleted_date = db.Column(db.String(50), nullable=True) exam_returned_ind = db.Column(db.Integer, nullable=False, default=0)", "applicable law or agreed to in writing, software distributed under", "nullable=True) notes = db.Column(db.String(400), nullable=True) exam_received_date = db.Column(db.DateTime, nullable=True) session_number", "= db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False) event_id = db.Column(db.String(25), nullable=False) exam_name =", "nullable=True) exam_method = db.Column(db.String(15), nullable=False) deleted_date = db.Column(db.String(50), nullable=True) exam_returned_ind", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "'''Copyright 2018 Province of British Columbia Licensed under the Apache", "under the License.''' from app.models.bookings import Base from qsystem import", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "app.models.bookings import Base from qsystem import db class Exam(Base): exam_id", "class Exam(Base): exam_id = db.Column(db.Integer, primary_key=True, autoincrement=True, 
nullable=False) booking_id =", "= db.Column(db.String(15), nullable=False) deleted_date = db.Column(db.String(50), nullable=True) exam_returned_ind = db.Column(db.Integer,", "deleted_date = db.Column(db.String(50), nullable=True) exam_returned_ind = db.Column(db.Integer, nullable=False, default=0) exam_returned_tracking_number", "from app.models.bookings import Base from qsystem import db class Exam(Base):", "db.ForeignKey(\"office.office_id\"), nullable=False) event_id = db.Column(db.String(25), nullable=False) exam_name = db.Column(db.String(50), nullable=False)", "the Apache License, Version 2.0 (the \"License\"); you may not", "file except in compliance with the License. You may obtain", "except in compliance with the License. You may obtain a", "or implied. See the License for the specific language governing", "KIND, either express or implied. See the License for the", "= db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"), nullable=True) exam_type_id = db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"),", "import Base from qsystem import db class Exam(Base): exam_id =", "ondelete=\"set null\"), nullable=True) exam_type_id = db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id =", "to in writing, software distributed under the License is distributed", "db.Column(db.String(50), nullable=False) examinee_name = db.Column(db.String(50), nullable=True) expiry_date = db.Column(db.DateTime, nullable=True)", "or agreed to in writing, software distributed under the License", "governing permissions and limitations under the License.''' from app.models.bookings import", "db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False) booking_id = db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"),", "law or agreed to in writing, software distributed under the", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "import db class Exam(Base): exam_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)", "nullable=True) booking = db.relationship(\"Booking\") exam_type = db.relationship(\"ExamType\") office = db.relationship(\"Office\")", "= db.Column(db.String(50), nullable=True) booking = db.relationship(\"Booking\") exam_type = db.relationship(\"ExamType\") office", "event_id = db.Column(db.String(25), nullable=False) exam_name = db.Column(db.String(50), nullable=False) examinee_name =", "compliance with the License. You may obtain a copy of", "nullable=True) exam_type_id = db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id = db.Column(db.Integer, db.ForeignKey(\"office.office_id\"),", "null\"), nullable=True) exam_type_id = db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id = db.Column(db.Integer,", "OF ANY KIND, either express or implied. See the License", "under the Apache License, Version 2.0 (the \"License\"); you may", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id = db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False) event_id = db.Column(db.String(25),", "= db.Column(db.String(400), nullable=True) exam_received_date = db.Column(db.DateTime, nullable=True) session_number = db.Column(db.Integer,", "<reponame>pixelater/queue-management '''Copyright 2018 Province of British Columbia Licensed under the", "Exam(Base): exam_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False) booking_id = db.Column(db.Integer,", "db.relationship(\"Office\") def __repr__(self): return '<Exam Name: (name={self.exam_name!r})>'.format(self=self) def __init__(self, **kwargs):", "nullable=False) office_id = db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False) event_id = db.Column(db.String(25), nullable=False)", "= 
db.Column(db.String(50), nullable=True) exam_returned_ind = db.Column(db.Integer, nullable=False, default=0) exam_returned_tracking_number =", "primary_key=True, autoincrement=True, nullable=False) booking_id = db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"), nullable=True)", "= db.Column(db.DateTime, nullable=True) session_number = db.Column(db.Integer, nullable=True) number_of_students = db.Column(db.Integer,", "and limitations under the License.''' from app.models.bookings import Base from", "exam_type_id = db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id = db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False)", "License, Version 2.0 (the \"License\"); you may not use this", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "for the specific language governing permissions and limitations under the", "See the License for the specific language governing permissions and", "session_number = db.Column(db.Integer, nullable=True) number_of_students = db.Column(db.Integer, nullable=True) exam_method =", "exam_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False) booking_id = db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\",", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "License.''' from app.models.bookings import Base from qsystem import db class", "default=0) exam_returned_tracking_number = db.Column(db.String(50), nullable=True) offsite_location = db.Column(db.String(50), nullable=True) booking", "= db.relationship(\"Booking\") exam_type = db.relationship(\"ExamType\") office = db.relationship(\"Office\") def __repr__(self):", "db.Column(db.DateTime, nullable=True) session_number = db.Column(db.Integer, nullable=True) number_of_students = db.Column(db.Integer, nullable=True)", "License. 
You may obtain a copy of the License at", "nullable=False) booking_id = db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"), nullable=True) exam_type_id =", "2018 Province of British Columbia Licensed under the Apache License,", "= db.Column(db.String(25), nullable=False) exam_name = db.Column(db.String(50), nullable=False) examinee_name = db.Column(db.String(50),", "db.relationship(\"ExamType\") office = db.relationship(\"Office\") def __repr__(self): return '<Exam Name: (name={self.exam_name!r})>'.format(self=self)", "def __repr__(self): return '<Exam Name: (name={self.exam_name!r})>'.format(self=self) def __init__(self, **kwargs): super(Exam,", "db.Column(db.String(50), nullable=True) booking = db.relationship(\"Booking\") exam_type = db.relationship(\"ExamType\") office =", "the License for the specific language governing permissions and limitations", "may not use this file except in compliance with the", "nullable=True) exam_returned_ind = db.Column(db.Integer, nullable=False, default=0) exam_returned_tracking_number = db.Column(db.String(50), nullable=True)", "of British Columbia Licensed under the Apache License, Version 2.0", "in writing, software distributed under the License is distributed on", "required by applicable law or agreed to in writing, software", "implied. 
See the License for the specific language governing permissions", "expiry_date = db.Column(db.DateTime, nullable=True) notes = db.Column(db.String(400), nullable=True) exam_received_date =", "autoincrement=True, nullable=False) booking_id = db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"), nullable=True) exam_type_id", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "nullable=True) offsite_location = db.Column(db.String(50), nullable=True) booking = db.relationship(\"Booking\") exam_type =", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "language governing permissions and limitations under the License.''' from app.models.bookings", "db.Column(db.Integer, db.ForeignKey(\"booking.booking_id\", ondelete=\"set null\"), nullable=True) exam_type_id = db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False)", "= db.Column(db.Integer, nullable=True) exam_method = db.Column(db.String(15), nullable=False) deleted_date = db.Column(db.String(50),", "exam_type = db.relationship(\"ExamType\") office = db.relationship(\"Office\") def __repr__(self): return '<Exam", "office_id = db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False) event_id = db.Column(db.String(25), nullable=False) exam_name", "= db.Column(db.Integer, db.ForeignKey(\"examtype.exam_type_id\"), nullable=False) office_id = db.Column(db.Integer, db.ForeignKey(\"office.office_id\"), nullable=False) event_id" ]
[ "<filename>leetcode/1672 Richest Customer Wealth.py class Solution(object): def maximumWealth(self, accounts): \"\"\"", ":rtype: int \"\"\" # Runtime: 36 ms # Memory: 13.5", "\"\"\" :type accounts: List[List[int]] :rtype: int \"\"\" # Runtime: 36", "# Runtime: 36 ms # Memory: 13.5 MB return max(map(sum,", "def maximumWealth(self, accounts): \"\"\" :type accounts: List[List[int]] :rtype: int \"\"\"", "\"\"\" # Runtime: 36 ms # Memory: 13.5 MB return", "List[List[int]] :rtype: int \"\"\" # Runtime: 36 ms # Memory:", "Runtime: 36 ms # Memory: 13.5 MB return max(map(sum, accounts))", "accounts: List[List[int]] :rtype: int \"\"\" # Runtime: 36 ms #", "accounts): \"\"\" :type accounts: List[List[int]] :rtype: int \"\"\" # Runtime:", "Customer Wealth.py class Solution(object): def maximumWealth(self, accounts): \"\"\" :type accounts:", "Wealth.py class Solution(object): def maximumWealth(self, accounts): \"\"\" :type accounts: List[List[int]]", "Solution(object): def maximumWealth(self, accounts): \"\"\" :type accounts: List[List[int]] :rtype: int", ":type accounts: List[List[int]] :rtype: int \"\"\" # Runtime: 36 ms", "Richest Customer Wealth.py class Solution(object): def maximumWealth(self, accounts): \"\"\" :type", "int \"\"\" # Runtime: 36 ms # Memory: 13.5 MB", "maximumWealth(self, accounts): \"\"\" :type accounts: List[List[int]] :rtype: int \"\"\" #", "class Solution(object): def maximumWealth(self, accounts): \"\"\" :type accounts: List[List[int]] :rtype:" ]
[ "= helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs) output = secondary_helper(data, **internals) expected_output", "to be null. output = secondary_helper(None) self.assertEqual(output, \"\") def test_format(self):", "data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output, expected_output) # Verify", "= helper(instance) self.assertEqual(output, '<a href=\"#1\">ExampleModel 1</a>') # Verify text override", "custom values output = helper(\"True-ish value\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'Yes')", "= secondary_helper(instance) self.assertEqual(output, '<a href=\"#1\">RelatedModel object</a>') # Verify ``key`` access", "helper normally expects in a callback context internals = {'field_name':", "-*- encoding: utf-8 -*- from datetime import datetime from functools", "self.assertEqual(output, 'Yes') output = helper(\"\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'No') def", "with provision of a value data = ExampleModel.objects.get(pk=1) # This", "name') with self.assertRaises(AttributeError) as cm: output = secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel'", "link_to_model works. \"\"\" helper = helpers.link_to_model # Verify that a", "data\")) def test_itemgetter(self): \"\"\" Verifies that itemgetter works. \"\"\" helper", "secondary_helper(data) self.assertEqual(output, (data, None)) # Verify ``arg`` argument secondary_helper =", "call with self.assertRaises(ValueError) as cm: tertiary_helper(data, **internals) self.assertEqual(str(cm.exception), \"'make_xeditable' cannot", "in anticipated to appear in rendered string 'special': \"SPECIAL DATA\",", "test_attrgetter(self): \"\"\" Verifies that attrgetter works. \"\"\" helper = helpers.attrgetter", "stuff not in anticipated to appear in rendered string 'special':", "that format works. 
\"\"\" helper = helpers.format # Verify simple", "override output = helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>')", "determine a value for 'url'.\") # Verify kwargs accumulate kwargs1", "ellipsis works for strings data = str(range(10)) secondary_helper = helper(slice(0,", "'&#10004;') output = helper(\"\") self.assertEqual(output, '&#10008;') # Verify custom values", "self.assertEqual(output, \"{0:,}\".format(data)) # Verify ``cast`` argument data = \"1234.56789\" secondary_helper", "self.assertEqual(output, '<a href=\"#1\">RelatedModel object</a>') # Verify ``key`` access version of", "'source': \"SOURCE DATA\", 'title': \"TITLE DATA\", 'placeholder': \"PLACEHOLDER DATA\", #", "to a related field instance = ExampleModel.objects.get(pk=2) secondary_helper = helper(key=lambda", "self.assertEqual(output, '&#10008;') # Verify custom values output = helper(\"True-ish value\",", "{ 'other_arg': True } secondary_helper = helper(**kwargs1) expected_kwargs = dict(kwargs1,", "secondary_helper(data) self.assertEqual(output, data[-1]) # Verify slicing access secondary_helper = helper(slice(1,", "self.assertEqual(output, '<a href=\"#1\">Special text</a>') def test_make_boolean_checkmark(self): \"\"\" Verifies that make_boolean_checkmark", "self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper = secondary_helper(**kwargs2) expected_kwargs = dict(kwargs1, **dict(kwargs2, extra_attrs=[]))", "\"Arg data\")) def test_itemgetter(self): \"\"\" Verifies that itemgetter works. 
\"\"\"", "be customized secondary_helper = helper(slice(0, 5), ellipsis=\"custom\") output = secondary_helper(data)", "secondary_helper = helper(slice(1, 3)) output = secondary_helper(data) self.assertEqual(output, data[1:3]) #", "helper(**kwargs1) expected_kwargs = dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper = secondary_helper(**kwargs2)", "= helpers.format_date # Verify simple use data = datetime.now() secondary_helper", "to appear in HTML as # \"data-*\" attributes. secondary_helper =", "strings data = str(range(10)) secondary_helper = helper(slice(0, 5), ellipsis=True) output", "True } secondary_helper = helper(**kwargs1) expected_kwargs = dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords,", "# Verify ``key`` access to transition an instance to a", "with self.assertRaises(AttributeError) as cm: output = secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel' object", "callback context internals = {'field_name': 'name'} # Verify chain calls", "Extra stuff not in anticipated to appear in rendered string", "models.DateTimeField that are allowed # to be null. 
output =", "a value for 'url'.\") # Verify kwargs accumulate kwargs1 =", "helper = helpers.itemgetter # Verify simple index access data =", "# Verify text override output = helper(instance, text=\"Special text\") self.assertEqual(output,", "a model without get_absolute_url() raises a complaint related = RelatedM2MModel.objects.get(pk=1)", "as cm: helper(related) self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object has no attribute 'get_absolute_url'\")", "list(range(5)) secondary_helper = helper(-1) output = secondary_helper(data) self.assertEqual(output, data[-1]) #", "\"view\" kwarg means the url is required from the call", "import helpers import six from .testcase import DatatableViewTestCase from .test_app.models", "secondary_helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') def test_make_boolean_checkmark(self): \"\"\"", "import DatatableViewTestCase from .test_app.models import ExampleModel, RelatedM2MModel if get_version().split('.') <", "helpers.through_filter target_function = lambda data, arg=None: (data, arg) # Verify", "= helpers.attrgetter # Verify simple attr lookup data = ExampleModel.objects.get(pk=1)", "data = datetime.now() secondary_helper = helper(\"%m/%d/%Y\") output = secondary_helper(data) self.assertEqual(output,", "None)) # Verify ``arg`` argument secondary_helper = helper(target_function, arg=\"Arg data\")", "helper(**kwargs) output = secondary_helper(data, **internals) expected_output = \"\"\" <a href=\"#\"", "def test_make_xeditable(self): \"\"\" Verifies that make_xeditable works. 
\"\"\" helper =", "ExampleModel.objects.get(pk=1) kwargs = { 'pk': \"PK DATA\", 'type': \"TYPE DATA\",", "object</a>') # Verify ``key`` access version of custom text output", "types data = range(10) output = secondary_helper(data) self.assertEqual(output, data[:5]) def", "Verify simple use data = datetime.now() secondary_helper = helper(\"%m/%d/%Y\") output", "DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-special=\"SPECIAL DATA\"", "data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-xeditable=\"xeditable\">", "simple use data = datetime.now() secondary_helper = helper(\"%m/%d/%Y\") output =", "secondary_helper = helper(\"%m/%d/%Y\") output = secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) # Verify", "Verify ``cast`` argument data = \"1234.56789\" secondary_helper = helper(\"{0:.2f}\", cast=float)", "helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') # Verify ``key``", "required from the call with self.assertRaises(ValueError) as cm: tertiary_helper(data, **internals)", "DATA\", 'source': \"SOURCE DATA\", 'title': \"TITLE DATA\", 'placeholder': \"PLACEHOLDER DATA\",", "# Verify ellipsis does nothing for non-string data types data", "= [test_data_fixture] def test_link_to_model(self): \"\"\" Verifies that link_to_model works. 
\"\"\"", "data = range(10) output = secondary_helper(data) self.assertEqual(output, data[:5]) def test_attrgetter(self):", "simple use data = 1234567890 secondary_helper = helper(\"{0:,}\") output =", "Verify that no \"view\" kwarg means the url is required", "\"\"\" helper = helpers.itemgetter # Verify simple index access data", "'other_arg': True } secondary_helper = helper(**kwargs1) expected_kwargs = dict(kwargs1, extra_attrs=[])", "\"SOURCE DATA\", 'title': \"TITLE DATA\", 'placeholder': \"PLACEHOLDER DATA\", # Extra", "an instance to a related field instance = ExampleModel.objects.get(pk=2) secondary_helper", "up as attributes data = ExampleModel.objects.get(pk=1) kwargs = { 'pk':", "helper(-1) output = secondary_helper(data) self.assertEqual(output, data[-1]) # Verify slicing access", "complaint. # This helps promise that the helper won't blow", "extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper = secondary_helper(**kwargs2) expected_kwargs = dict(kwargs1, **dict(kwargs2,", "functools import partial from django import get_version from datatableview import", "'placeholder': \"PLACEHOLDER DATA\", # Extra stuff not in anticipated to", "does nothing for non-string data types data = range(10) output", "1234567890 secondary_helper = helper(\"{0:,}\") output = secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data)) #", "make_xeditable works. 
\"\"\" helper = helpers.make_xeditable # Items that the", "\"TYPE DATA\", 'url': \"URL DATA\", 'source': \"SOURCE DATA\", 'title': \"TITLE", "expected_kwargs = dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper = secondary_helper(**kwargs2) expected_kwargs", "= secondary_helper() self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper), partial) # Verify chain ends", "Verify chain calls don't trigger rendering secondary_helper = helper() tertiary_helper", "calls don't trigger rendering secondary_helper = helper() tertiary_helper = secondary_helper()", "= secondary_helper(**kwargs2) expected_kwargs = dict(kwargs1, **dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs) #", "self.assertEqual(output, data[1:3]) # Verify ellipsis works for strings data =", "DATA\", # Extra stuff not in anticipated to appear in", "end up as attributes data = ExampleModel.objects.get(pk=1) kwargs = {", "objects get swallowed without complaint. # This helps promise that", "= helper(**kwargs) output = secondary_helper(data, **internals) expected_output = \"\"\" <a", "django import get_version from datatableview import helpers import six from", "= secondary_helper(data) self.assertEqual(output, data[-1]) # Verify slicing access secondary_helper =", "as # \"data-*\" attributes. 
secondary_helper = helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs)", "use data = \"Data string\" secondary_helper = helper(target_function) output =", "<a href=\"#\" data-name=\"name\" data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE DATA\" data-title=\"TITLE", "'data_custom', 'fake'], **kwargs) output = secondary_helper(data, **internals) expected_output = \"\"\"", "text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') def test_make_boolean_checkmark(self): \"\"\" Verifies", "text override output = helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special", "helper = helpers.format # Verify simple use data = 1234567890", "allowed # to be null. output = secondary_helper(None) self.assertEqual(output, \"\")", "= secondary_helper(data, **internals) expected_output = \"\"\" <a href=\"#\" data-name=\"name\" data-pk=\"PK", "output = secondary_helper(data) self.assertEqual(output, (data, None)) # Verify ``arg`` argument", "rendering secondary_helper = helper() tertiary_helper = secondary_helper() self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper),", "= helpers.format # Verify simple use data = 1234567890 secondary_helper", "of a value data = ExampleModel.objects.get(pk=1) # This needs a", "'<a href=\"#1\">Special text</a>') # Verify ``key`` access to transition an", "'url': \"URL DATA\", 'source': \"SOURCE DATA\", 'title': \"TITLE DATA\", 'placeholder':", "'No') def test_format_date(self): \"\"\" Verifies that format_date works. 
\"\"\" helper", "customized secondary_helper = helper(slice(0, 5), ellipsis=\"custom\") output = secondary_helper(data) self.assertEqual(output,", "bad attribrute lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('bad field", "\"DATA-CUSTOM DATA\", } secondary_helper = helper(**kwargs) output = secondary_helper(data, **internals)", "self.assertEqual(output, 'No') def test_format_date(self): \"\"\" Verifies that format_date works. \"\"\"", "= ExampleModel.objects.get(pk=1) # This needs a \"url\" arg because we", "dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper = secondary_helper(**kwargs2) expected_kwargs = dict(kwargs1,", "= ExampleModel.objects.get(pk=2) secondary_helper = helper(key=lambda o: o.related) output = secondary_helper(instance)", "``cast`` argument data = \"1234.56789\" secondary_helper = helper(\"{0:.2f}\", cast=float) output", "test_through_filter(self): \"\"\" Verifies that through_filter works. \"\"\" helper = helpers.through_filter", "data = str(range(10)) secondary_helper = helper(slice(0, 5), ellipsis=True) output =", "def test_through_filter(self): \"\"\" Verifies that through_filter works. \"\"\" helper =", "from functools import partial from django import get_version from datatableview", "\"\"\" helper = helpers.format_date # Verify simple use data =", "``key`` access to transition an instance to a related field", "HTML as # \"data-*\" attributes. 
secondary_helper = helper(extra_attrs=['special', 'data_custom', 'fake'],", "data-name=\"name\" data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE", "field instance = ExampleModel.objects.get(pk=2) secondary_helper = helper(key=lambda o: o.related) output", "explicit additions via ``extra_attrs`` allows kwargs to appear in HTML", "text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') def test_make_boolean_checkmark(self): \"\"\" Verifies that", "= secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) # Verify that None objects get", "field name') with self.assertRaises(AttributeError) as cm: output = secondary_helper(data) self.assertEqual(str(cm.exception),", "from .testcase import DatatableViewTestCase from .test_app.models import ExampleModel, RelatedM2MModel if", "lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('pk') output = secondary_helper(data)", "data.pk) # Verify bad attribrute lookup data = ExampleModel.objects.get(pk=1) secondary_helper", "DATA\", 'url': \"URL DATA\", 'source': \"SOURCE DATA\", 'title': \"TITLE DATA\",", "helper(\"%m/%d/%Y\") output = secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) # Verify that None", "= range(10) output = secondary_helper(data) self.assertEqual(output, data[:5]) def test_attrgetter(self): \"\"\"", "str(range(10)) secondary_helper = helper(slice(0, 5), ellipsis=True) output = secondary_helper(data) self.assertEqual(output,", "datatableview import helpers import six from .testcase import DatatableViewTestCase from", "DATA\" data-value=\"1\" data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM DATA\" data-xeditable=\"xeditable\"> ExampleModel 1 </a>", "Verify simple use output = helper(\"True-ish value\") self.assertEqual(output, '&#10004;') output", "rendered string 'special': \"SPECIAL DATA\", 'data_custom': \"DATA-CUSTOM DATA\", } secondary_helper", 
"# Extra stuff not in anticipated to appear in rendered", "= ExampleModel.objects.get(pk=1) secondary_helper = helper('bad field name') with self.assertRaises(AttributeError) as", "= helpers.link_to_model # Verify that a model without get_absolute_url() raises", "ellipsis does nothing for non-string data types data = range(10)", "data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output, expected_output) # Verify that", "\"\"\" helper = helpers.link_to_model # Verify that a model without", "secondary_helper = helper(**kwargs1) expected_kwargs = dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper", "self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object has no attribute 'get_absolute_url'\") # Verify simple", "Verifies that attrgetter works. \"\"\" helper = helpers.attrgetter # Verify", "ellipsis=\"custom\") output = secondary_helper(data) self.assertEqual(output, data[:5] + \"custom\") # Verify", "related field instance = ExampleModel.objects.get(pk=2) secondary_helper = helper(key=lambda o: o.related)", "attribrute lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('bad field name')", "six.string_types)) # Verify that no \"view\" kwarg means the url", "secondary_helper = helper(target_function, arg=\"Arg data\") output = secondary_helper(data) self.assertEqual(output, (data,", "expected_output = \"\"\" <a href=\"#\" data-name=\"name\" data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER DATA\"", "DATA\", 'title': \"TITLE DATA\", 'placeholder': \"PLACEHOLDER DATA\", # Extra stuff", "secondary_helper(data) self.assertEqual(output, (data, \"Arg data\")) def test_itemgetter(self): \"\"\" Verifies that", "(data, arg) # Verify simple use data = \"Data string\"", "ExampleModel.objects.get(pk=1) secondary_helper = helper('pk') output = secondary_helper(data) self.assertEqual(output, data.pk) #", "= tertiary_helper(data, url=\"/\", **internals) 
self.assertTrue(isinstance(output, six.string_types)) # Verify that no", "= secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data)) # Verify ``cast`` argument data =", "= secondary_helper(data) self.assertEqual(output, data.pk) # Verify bad attribrute lookup data", "= helper(\"\") self.assertEqual(output, '&#10008;') # Verify custom values output =", "that are allowed # to be null. output = secondary_helper(None)", "helper(target_function, arg=\"Arg data\") output = secondary_helper(data) self.assertEqual(output, (data, \"Arg data\"))", "'name'} # Verify chain calls don't trigger rendering secondary_helper =", "data = ExampleModel.objects.get(pk=1) # This needs a \"url\" arg because", "DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel", "works. \"\"\" helper = helpers.format_date # Verify simple use data", "arg=None: (data, arg) # Verify simple use data = \"Data", "partial) # Verify chain ends with provision of a value", "= dict(kwargs1, **dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs) # Verify default kwarg", "href=\"#1\">RelatedModel object</a>') # Verify ``key`` access version of custom text", "raises a complaint related = RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError) as cm:", "DATA\" data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output, expected_output) #", "means the url is required from the call with self.assertRaises(ValueError)", "Verify custom values output = helper(\"True-ish value\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output,", "} secondary_helper = helper(**kwargs1) expected_kwargs = dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs)", "helpers import six from .testcase import DatatableViewTestCase from .test_app.models import", "cm: helper(related) 
self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object has no attribute 'get_absolute_url'\") #", "**kwargs) output = secondary_helper(data, **internals) expected_output = \"\"\" <a href=\"#\"", "output = helper(\"True-ish value\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'Yes') output =", "= secondary_helper(data) self.assertEqual(output, (data, None)) # Verify ``arg`` argument secondary_helper", "test_itemgetter(self): \"\"\" Verifies that itemgetter works. \"\"\" helper = helpers.itemgetter", "'title': \"TITLE DATA\", 'placeholder': \"PLACEHOLDER DATA\", # Extra stuff not", "ellipsis=True) output = secondary_helper(data) self.assertEqual(output, data[:5] + \"...\") # Verify", "datetime import datetime from functools import partial from django import", "won't blow up for models.DateTimeField that are allowed # to", "self.assertHTMLEqual(output, expected_output) # Verify that explicit additions via ``extra_attrs`` allows", "that attrgetter works. \"\"\" helper = helpers.attrgetter # Verify simple", "helper = helpers.through_filter target_function = lambda data, arg=None: (data, arg)", "\"custom\") # Verify ellipsis does nothing for non-string data types", "= ExampleModel.objects.get(pk=1) kwargs = { 'pk': \"PK DATA\", 'type': \"TYPE", "swallowed without complaint. 
# This helps promise that the helper", "false_value=\"No\") self.assertEqual(output, 'Yes') output = helper(\"\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'No')", "as cm: output = secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel' object has no", "the helper normally expects in a callback context internals =", "self.assertEqual(output, '<a href=\"#1\">Special text</a>') # Verify ``key`` access to transition", "helper(key=lambda o: o.related) output = secondary_helper(instance) self.assertEqual(output, '<a href=\"#1\">RelatedModel object</a>')", "helper = helpers.link_to_model # Verify that a model without get_absolute_url()", "helper won't blow up for models.DateTimeField that are allowed #", "= \"\"\" <a href=\"#\" data-name=\"name\" data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE", "# Verify simple use instance = ExampleModel.objects.get(pk=1) output = helper(instance)", "expects in a callback context internals = {'field_name': 'name'} #", "\"\"\" <a href=\"#\" data-name=\"name\" data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE DATA\"", "= secondary_helper(data) self.assertEqual(output, data[:5]) def test_attrgetter(self): \"\"\" Verifies that attrgetter", "Verify slicing access secondary_helper = helper(slice(1, 3)) output = secondary_helper(data)", "data[:5]) def test_attrgetter(self): \"\"\" Verifies that attrgetter works. 
\"\"\" helper", "= secondary_helper(None) self.assertEqual(output, \"\") def test_format(self): \"\"\" Verifies that format", "secondary_helper(data) self.assertEqual(output, data[:5] + \"custom\") # Verify ellipsis does nothing", "helper(\"\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'No') def test_format_date(self): \"\"\" Verifies that", "output = secondary_helper(data) self.assertEqual(output, data[-1]) # Verify slicing access secondary_helper", "HelpersTests(DatatableViewTestCase): fixtures = [test_data_fixture] def test_link_to_model(self): \"\"\" Verifies that link_to_model", "'Yes') output = helper(\"\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'No') def test_format_date(self):", "get_absolute_url() raises a complaint related = RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError) as", "# This needs a \"url\" arg because we want to", "output = helper(\"True-ish value\") self.assertEqual(output, '&#10004;') output = helper(\"\") self.assertEqual(output,", "output = secondary_helper(data) self.assertEqual(output, data[:5]) def test_attrgetter(self): \"\"\" Verifies that", "# Verify that None objects get swallowed without complaint. 
#", "data-value=\"1\" data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM DATA\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\"", "datetime.now() secondary_helper = helper(\"%m/%d/%Y\") output = secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) #", "successful use output = tertiary_helper(data, url=\"/\", **internals) self.assertTrue(isinstance(output, six.string_types)) #", "href=\"#1\">ExampleModel 1</a>') # Verify text override output = helper(instance, text=\"Special", "with self.assertRaises(AttributeError) as cm: helper(related) self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object has no", "import ExampleModel, RelatedM2MModel if get_version().split('.') < ['1', '7']: test_data_fixture =", ".testcase import DatatableViewTestCase from .test_app.models import ExampleModel, RelatedM2MModel if get_version().split('.')", "tertiary_helper(data, **internals) self.assertEqual(str(cm.exception), \"'make_xeditable' cannot determine a value for 'url'.\")", "from datetime import datetime from functools import partial from django", "5), ellipsis=True) output = secondary_helper(data) self.assertEqual(output, data[:5] + \"...\") #", "self.assertRaises(AttributeError) as cm: helper(related) self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object has no attribute", "kwargs accumulate kwargs1 = { 'type': 'textarea' } kwargs2 =", "that through_filter works. 
\"\"\" helper = helpers.through_filter target_function = lambda", "{'field_name': 'name'} # Verify chain calls don't trigger rendering secondary_helper", "= secondary_helper(data) self.assertEqual(output, (data, \"Arg data\")) def test_itemgetter(self): \"\"\" Verifies", "< ['1', '7']: test_data_fixture = 'test_data_legacy.json' else: test_data_fixture = 'test_data.json'", "that no \"view\" kwarg means the url is required from", "``extra_attrs`` allows kwargs to appear in HTML as # \"data-*\"", "DATA\", } secondary_helper = helper(**kwargs) output = secondary_helper(data, **internals) expected_output", "helper(\"{0:.2f}\", cast=float) output = secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data))) def test_through_filter(self): \"\"\"", "self.assertEqual(output, data[:5]) def test_attrgetter(self): \"\"\" Verifies that attrgetter works. \"\"\"", "helps promise that the helper won't blow up for models.DateTimeField", "# Verify chain ends with provision of a value data", "output = secondary_helper(data, **internals) expected_output = \"\"\" <a href=\"#\" data-name=\"name\"", "cannot determine a value for 'url'.\") # Verify kwargs accumulate", "</a> \"\"\" self.assertHTMLEqual(output, expected_output) # Verify that explicit additions via", "data-url=\"URL DATA\" data-value=\"1\" data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM DATA\" data-xeditable=\"xeditable\"> ExampleModel 1", "secondary_helper(**kwargs2) expected_kwargs = dict(kwargs1, **dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs) # Verify", "5), ellipsis=\"custom\") output = secondary_helper(data) self.assertEqual(output, data[:5] + \"custom\") #", "a related field instance = ExampleModel.objects.get(pk=2) secondary_helper = helper(key=lambda o:", "# Verify ellipsis works for strings data = str(range(10)) secondary_helper", "is required from the call with self.assertRaises(ValueError) as cm: tertiary_helper(data,", "secondary_helper = 
helper(key=lambda o: o.related) output = secondary_helper(instance) self.assertEqual(output, '<a", "\"{0:.2f}\".format(float(data))) def test_through_filter(self): \"\"\" Verifies that through_filter works. \"\"\" helper", "needs a \"url\" arg because we want to test successful", "# Verify ``cast`` argument data = \"1234.56789\" secondary_helper = helper(\"{0:.2f}\",", "Items that the helper normally expects in a callback context", "false_value=\"No\") self.assertEqual(output, 'No') def test_format_date(self): \"\"\" Verifies that format_date works.", "has no attribute 'bad field name'\") def test_make_xeditable(self): \"\"\" Verifies", "output = helper(instance) self.assertEqual(output, '<a href=\"#1\">ExampleModel 1</a>') # Verify text", "data = \"Data string\" secondary_helper = helper(target_function) output = secondary_helper(data)", "ExampleModel.objects.get(pk=1) # This needs a \"url\" arg because we want", "output = secondary_helper(data) self.assertEqual(output, data[1:3]) # Verify ellipsis works for", "\"{0:,}\".format(data)) # Verify ``cast`` argument data = \"1234.56789\" secondary_helper =", "= 'test_data.json' class HelpersTests(DatatableViewTestCase): fixtures = [test_data_fixture] def test_link_to_model(self): \"\"\"", "= { 'pk': \"PK DATA\", 'type': \"TYPE DATA\", 'url': \"URL", "cm: tertiary_helper(data, **internals) self.assertEqual(str(cm.exception), \"'make_xeditable' cannot determine a value for", "helper(\"True-ish value\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'Yes') output = helper(\"\", true_value=\"Yes\",", "# Verify that a model without get_absolute_url() raises a complaint", "'test_data_legacy.json' else: test_data_fixture = 'test_data.json' class HelpersTests(DatatableViewTestCase): fixtures = [test_data_fixture]", "works. \"\"\" helper = helpers.through_filter target_function = lambda data, arg=None:", "Verifies that through_filter works. 
\"\"\" helper = helpers.through_filter target_function =", "data = ExampleModel.objects.get(pk=1) secondary_helper = helper('pk') output = secondary_helper(data) self.assertEqual(output,", "appear in HTML as # \"data-*\" attributes. secondary_helper = helper(extra_attrs=['special',", "'test_data.json' class HelpersTests(DatatableViewTestCase): fixtures = [test_data_fixture] def test_link_to_model(self): \"\"\" Verifies", "self.assertEqual(tertiary_helper.keywords, expected_kwargs) # Verify default kwarg names end up as", "data = \"1234.56789\" secondary_helper = helper(\"{0:.2f}\", cast=float) output = secondary_helper(data)", "{ 'pk': \"PK DATA\", 'type': \"TYPE DATA\", 'url': \"URL DATA\",", "secondary_helper = helper(target_function) output = secondary_helper(data) self.assertEqual(output, (data, None)) #", "output = secondary_helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') def", "argument secondary_helper = helper(target_function, arg=\"Arg data\") output = secondary_helper(data) self.assertEqual(output,", "attributes. 
secondary_helper = helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs) output = secondary_helper(data,", "output = secondary_helper(instance) self.assertEqual(output, '<a href=\"#1\">RelatedModel object</a>') # Verify ``key``", "access to transition an instance to a related field instance", "helper(\"True-ish value\") self.assertEqual(output, '&#10004;') output = helper(\"\") self.assertEqual(output, '&#10008;') #", "3)) output = secondary_helper(data) self.assertEqual(output, data[1:3]) # Verify ellipsis works", "= helper('bad field name') with self.assertRaises(AttributeError) as cm: output =", "o.related) output = secondary_helper(instance) self.assertEqual(output, '<a href=\"#1\">RelatedModel object</a>') # Verify", "self.assertEqual(str(cm.exception), \"'ExampleModel' object has no attribute 'bad field name'\") def", "= helper() tertiary_helper = secondary_helper() self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper), partial) #", "helper('pk') output = secondary_helper(data) self.assertEqual(output, data.pk) # Verify bad attribrute", "for strings data = str(range(10)) secondary_helper = helper(slice(0, 5), ellipsis=True)", "1</a>') # Verify text override output = helper(instance, text=\"Special text\")", "helper(slice(0, 5), ellipsis=True) output = secondary_helper(data) self.assertEqual(output, data[:5] + \"...\")", "data-url=\"URL DATA\" data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output, expected_output)", "\"\"\" Verifies that link_to_model works. 
\"\"\" helper = helpers.link_to_model #", "\"TITLE DATA\", 'placeholder': \"PLACEHOLDER DATA\", # Extra stuff not in", "data = ExampleModel.objects.get(pk=1) secondary_helper = helper('bad field name') with self.assertRaises(AttributeError)", "\"Data string\" secondary_helper = helper(target_function) output = secondary_helper(data) self.assertEqual(output, (data,", "# Verify ellipsis can be customized secondary_helper = helper(slice(0, 5),", "instance to a related field instance = ExampleModel.objects.get(pk=2) secondary_helper =", "= helper(target_function) output = secondary_helper(data) self.assertEqual(output, (data, None)) # Verify", "``arg`` argument secondary_helper = helper(target_function, arg=\"Arg data\") output = secondary_helper(data)", "ends with provision of a value data = ExampleModel.objects.get(pk=1) #", "self.assertEqual(output, data[-1]) # Verify slicing access secondary_helper = helper(slice(1, 3))", "Verify ellipsis works for strings data = str(range(10)) secondary_helper =", "= secondary_helper(data) self.assertEqual(output, data[:5] + \"custom\") # Verify ellipsis does", "# Verify chain calls don't trigger rendering secondary_helper = helper()", "expected_kwargs) # Verify default kwarg names end up as attributes", "that explicit additions via ``extra_attrs`` allows kwargs to appear in", "the call with self.assertRaises(ValueError) as cm: tertiary_helper(data, **internals) self.assertEqual(str(cm.exception), \"'make_xeditable'", "secondary_helper(data) self.assertEqual(output, data[1:3]) # Verify ellipsis works for strings data", "simple use output = helper(\"True-ish value\") self.assertEqual(output, '&#10004;') output =", "= helper(**kwargs1) expected_kwargs = dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper =", "through_filter works. 
\"\"\" helper = helpers.through_filter target_function = lambda data,", "{ 'type': 'textarea' } kwargs2 = { 'other_arg': True }", "= secondary_helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') def test_make_boolean_checkmark(self):", "don't trigger rendering secondary_helper = helper() tertiary_helper = secondary_helper() self.assertEqual(type(secondary_helper),", "blow up for models.DateTimeField that are allowed # to be", "Verify default kwarg names end up as attributes data =", "import datetime from functools import partial from django import get_version", "'fake'], **kwargs) output = secondary_helper(data, **internals) expected_output = \"\"\" <a", "true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'Yes') output = helper(\"\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output,", "= { 'other_arg': True } secondary_helper = helper(**kwargs1) expected_kwargs =", "helpers.link_to_model # Verify that a model without get_absolute_url() raises a", "DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel 1 </a>", "tertiary_helper(data, url=\"/\", **internals) self.assertTrue(isinstance(output, six.string_types)) # Verify that no \"view\"", "has no attribute 'get_absolute_url'\") # Verify simple use instance =", "RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError) as cm: helper(related) self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object has", "output = helper(\"\") self.assertEqual(output, '&#10008;') # Verify custom values output", "that make_boolean_checkmark works. 
\"\"\" helper = helpers.make_boolean_checkmark # Verify simple", "= str(range(10)) secondary_helper = helper(slice(0, 5), ellipsis=True) output = secondary_helper(data)", "DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM DATA\"", "self.assertEqual(output, data.pk) # Verify bad attribrute lookup data = ExampleModel.objects.get(pk=1)", "\"\"\" Verifies that make_boolean_checkmark works. \"\"\" helper = helpers.make_boolean_checkmark #", "self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) # Verify that None objects get swallowed without", "Verify ``key`` access to transition an instance to a related", "DATA\" data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL", "helper = helpers.make_boolean_checkmark # Verify simple use output = helper(\"True-ish", "class HelpersTests(DatatableViewTestCase): fixtures = [test_data_fixture] def test_link_to_model(self): \"\"\" Verifies that", "= helpers.make_boolean_checkmark # Verify simple use output = helper(\"True-ish value\")", "'<a href=\"#1\">Special text</a>') def test_make_boolean_checkmark(self): \"\"\" Verifies that make_boolean_checkmark works.", "= helper(\"%m/%d/%Y\") output = secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) # Verify that", "helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs) output = secondary_helper(data, **internals) expected_output =", "appear in rendered string 'special': \"SPECIAL DATA\", 'data_custom': \"DATA-CUSTOM DATA\",", "with self.assertRaises(ValueError) as cm: tertiary_helper(data, **internals) self.assertEqual(str(cm.exception), \"'make_xeditable' cannot determine", "secondary_helper = helper() tertiary_helper = secondary_helper() self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper), partial)", "\"SPECIAL DATA\", 'data_custom': \"DATA-CUSTOM DATA\", } 
secondary_helper = helper(**kwargs) output", "Verify bad attribrute lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('bad", ".test_app.models import ExampleModel, RelatedM2MModel if get_version().split('.') < ['1', '7']: test_data_fixture", "normally expects in a callback context internals = {'field_name': 'name'}", "\"PK DATA\", 'type': \"TYPE DATA\", 'url': \"URL DATA\", 'source': \"SOURCE", "= list(range(5)) secondary_helper = helper(-1) output = secondary_helper(data) self.assertEqual(output, data[-1])", "datetime from functools import partial from django import get_version from", "use data = datetime.now() secondary_helper = helper(\"%m/%d/%Y\") output = secondary_helper(data)", "kwargs1 = { 'type': 'textarea' } kwargs2 = { 'other_arg':", "\"\"\" helper = helpers.attrgetter # Verify simple attr lookup data", "use output = helper(\"True-ish value\") self.assertEqual(output, '&#10004;') output = helper(\"\")", "self.assertEqual(output, \"\") def test_format(self): \"\"\" Verifies that format works. 
\"\"\"", "helper(\"\") self.assertEqual(output, '&#10008;') # Verify custom values output = helper(\"True-ish", "model without get_absolute_url() raises a complaint related = RelatedM2MModel.objects.get(pk=1) with", "works for strings data = str(range(10)) secondary_helper = helper(slice(0, 5),", "1 </a> \"\"\" self.assertHTMLEqual(output, expected_output) # Verify that explicit additions", "= secondary_helper(data) self.assertEqual(output, data[:5] + \"...\") # Verify ellipsis can", "transition an instance to a related field instance = ExampleModel.objects.get(pk=2)", "allows kwargs to appear in HTML as # \"data-*\" attributes.", "can be customized secondary_helper = helper(slice(0, 5), ellipsis=\"custom\") output =", "= \"1234.56789\" secondary_helper = helper(\"{0:.2f}\", cast=float) output = secondary_helper(data) self.assertEqual(output,", "Verify ``key`` access version of custom text output = secondary_helper(instance,", "without get_absolute_url() raises a complaint related = RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError)", "instance = ExampleModel.objects.get(pk=2) secondary_helper = helper(key=lambda o: o.related) output =", "data\") output = secondary_helper(data) self.assertEqual(output, (data, \"Arg data\")) def test_itemgetter(self):", "data.strftime(\"%m/%d/%Y\")) # Verify that None objects get swallowed without complaint.", "to appear in rendered string 'special': \"SPECIAL DATA\", 'data_custom': \"DATA-CUSTOM", "encoding: utf-8 -*- from datetime import datetime from functools import", "use output = tertiary_helper(data, url=\"/\", **internals) self.assertTrue(isinstance(output, six.string_types)) # Verify", "= helper(\"\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'No') def test_format_date(self): \"\"\" Verifies", "index access data = list(range(5)) secondary_helper = helper(-1) output =", "be null. 
output = secondary_helper(None) self.assertEqual(output, \"\") def test_format(self): \"\"\"", "output = secondary_helper(data) self.assertEqual(output, data[:5] + \"...\") # Verify ellipsis", "'7']: test_data_fixture = 'test_data_legacy.json' else: test_data_fixture = 'test_data.json' class HelpersTests(DatatableViewTestCase):", "string 'special': \"SPECIAL DATA\", 'data_custom': \"DATA-CUSTOM DATA\", } secondary_helper =", "kwargs to appear in HTML as # \"data-*\" attributes. secondary_helper", "argument data = \"1234.56789\" secondary_helper = helper(\"{0:.2f}\", cast=float) output =", "get_version from datatableview import helpers import six from .testcase import", "\"\"\" Verifies that itemgetter works. \"\"\" helper = helpers.itemgetter #", "expected_kwargs) tertiary_helper = secondary_helper(**kwargs2) expected_kwargs = dict(kwargs1, **dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords,", "works. \"\"\" helper = helpers.format # Verify simple use data", "helper(slice(0, 5), ellipsis=\"custom\") output = secondary_helper(data) self.assertEqual(output, data[:5] + \"custom\")", "expected_output) # Verify that explicit additions via ``extra_attrs`` allows kwargs", "= helper(\"True-ish value\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'Yes') output = helper(\"\",", "secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data)) # Verify ``cast`` argument data = \"1234.56789\"", "'get_absolute_url'\") # Verify simple use instance = ExampleModel.objects.get(pk=1) output =", "attribute 'get_absolute_url'\") # Verify simple use instance = ExampleModel.objects.get(pk=1) output", "= secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel' object has no attribute 'bad field", "tertiary_helper = secondary_helper(**kwargs2) expected_kwargs = dict(kwargs1, **dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs)", "from .test_app.models import ExampleModel, 
RelatedM2MModel if get_version().split('.') < ['1', '7']:", "RelatedM2MModel if get_version().split('.') < ['1', '7']: test_data_fixture = 'test_data_legacy.json' else:", "a complaint related = RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError) as cm: helper(related)", "target_function = lambda data, arg=None: (data, arg) # Verify simple", "test_data_fixture = 'test_data.json' class HelpersTests(DatatableViewTestCase): fixtures = [test_data_fixture] def test_link_to_model(self):", "data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\"", "simple index access data = list(range(5)) secondary_helper = helper(-1) output", "= lambda data, arg=None: (data, arg) # Verify simple use", "\"'ExampleModel' object has no attribute 'bad field name'\") def test_make_xeditable(self):", "# Verify simple index access data = list(range(5)) secondary_helper =", "\"url\" arg because we want to test successful use output", "data, arg=None: (data, arg) # Verify simple use data =", "\"\"\" helper = helpers.make_boolean_checkmark # Verify simple use output =", "helper() tertiary_helper = secondary_helper() self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper), partial) # Verify", "\"\"\" Verifies that through_filter works. \"\"\" helper = helpers.through_filter target_function", "from datatableview import helpers import six from .testcase import DatatableViewTestCase", "the helper won't blow up for models.DateTimeField that are allowed", "def test_format_date(self): \"\"\" Verifies that format_date works. \"\"\" helper =", "test_make_boolean_checkmark(self): \"\"\" Verifies that make_boolean_checkmark works. 
\"\"\" helper = helpers.make_boolean_checkmark", "secondary_helper(data, **internals) expected_output = \"\"\" <a href=\"#\" data-name=\"name\" data-pk=\"PK DATA\"", "Verify simple use data = 1234567890 secondary_helper = helper(\"{0:,}\") output", "Verify chain ends with provision of a value data =", "'type': 'textarea' } kwargs2 = { 'other_arg': True } secondary_helper", "'url'.\") # Verify kwargs accumulate kwargs1 = { 'type': 'textarea'", "without complaint. # This helps promise that the helper won't", "data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM DATA\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output,", "object has no attribute 'bad field name'\") def test_make_xeditable(self): \"\"\"", "we want to test successful use output = tertiary_helper(data, url=\"/\",", "simple use instance = ExampleModel.objects.get(pk=1) output = helper(instance) self.assertEqual(output, '<a", "= helper(slice(0, 5), ellipsis=True) output = secondary_helper(data) self.assertEqual(output, data[:5] +", "cast=float) output = secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data))) def test_through_filter(self): \"\"\" Verifies", "that the helper won't blow up for models.DateTimeField that are", "href=\"#1\">Special text</a>') def test_make_boolean_checkmark(self): \"\"\" Verifies that make_boolean_checkmark works. 
\"\"\"", "context internals = {'field_name': 'name'} # Verify chain calls don't", "import partial from django import get_version from datatableview import helpers", "= datetime.now() secondary_helper = helper(\"%m/%d/%Y\") output = secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\"))", "via ``extra_attrs`` allows kwargs to appear in HTML as #", "# Verify that explicit additions via ``extra_attrs`` allows kwargs to", "custom text output = secondary_helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special", "test_data_fixture = 'test_data_legacy.json' else: test_data_fixture = 'test_data.json' class HelpersTests(DatatableViewTestCase): fixtures", "want to test successful use output = tertiary_helper(data, url=\"/\", **internals)", "from django import get_version from datatableview import helpers import six", "# Verify kwargs accumulate kwargs1 = { 'type': 'textarea' }", "names end up as attributes data = ExampleModel.objects.get(pk=1) kwargs =", "slicing access secondary_helper = helper(slice(1, 3)) output = secondary_helper(data) self.assertEqual(output,", "format_date works. \"\"\" helper = helpers.format_date # Verify simple use", "['1', '7']: test_data_fixture = 'test_data_legacy.json' else: test_data_fixture = 'test_data.json' class", "DATA\" data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\"", "test_make_xeditable(self): \"\"\" Verifies that make_xeditable works. \"\"\" helper = helpers.make_xeditable", "self.assertEqual(type(tertiary_helper), partial) # Verify chain ends with provision of a", "helper('bad field name') with self.assertRaises(AttributeError) as cm: output = secondary_helper(data)", "def test_format(self): \"\"\" Verifies that format works. 
\"\"\" helper =", "chain ends with provision of a value data = ExampleModel.objects.get(pk=1)", "secondary_helper() self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper), partial) # Verify chain ends with", "ExampleModel.objects.get(pk=1) output = helper(instance) self.assertEqual(output, '<a href=\"#1\">ExampleModel 1</a>') # Verify", "for non-string data types data = range(10) output = secondary_helper(data)", "true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'No') def test_format_date(self): \"\"\" Verifies that format_date", "Verifies that itemgetter works. \"\"\" helper = helpers.itemgetter # Verify", "'textarea' } kwargs2 = { 'other_arg': True } secondary_helper =", "secondary_helper = helper('bad field name') with self.assertRaises(AttributeError) as cm: output", "text</a>') def test_make_boolean_checkmark(self): \"\"\" Verifies that make_boolean_checkmark works. \"\"\" helper", "\"\"\" helper = helpers.make_xeditable # Items that the helper normally", "the url is required from the call with self.assertRaises(ValueError) as", "output = secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) # Verify that None objects", "values output = helper(\"True-ish value\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'Yes') output", "works. \"\"\" helper = helpers.make_xeditable # Items that the helper", "self.assertEqual(output, '&#10004;') output = helper(\"\") self.assertEqual(output, '&#10008;') # Verify custom", "in HTML as # \"data-*\" attributes. 
secondary_helper = helper(extra_attrs=['special', 'data_custom',", "output = tertiary_helper(data, url=\"/\", **internals) self.assertTrue(isinstance(output, six.string_types)) # Verify that", "expected_kwargs = dict(kwargs1, **dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs) # Verify default", "secondary_helper(data) self.assertEqual(output, data.pk) # Verify bad attribrute lookup data =", "that link_to_model works. \"\"\" helper = helpers.link_to_model # Verify that", "Verifies that format_date works. \"\"\" helper = helpers.format_date # Verify", "itemgetter works. \"\"\" helper = helpers.itemgetter # Verify simple index", "ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output, expected_output) # Verify that explicit", "six from .testcase import DatatableViewTestCase from .test_app.models import ExampleModel, RelatedM2MModel", "tertiary_helper = secondary_helper() self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper), partial) # Verify chain", "for 'url'.\") # Verify kwargs accumulate kwargs1 = { 'type':", "} secondary_helper = helper(**kwargs) output = secondary_helper(data, **internals) expected_output =", "output = secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data)) # Verify ``cast`` argument data", "works. \"\"\" helper = helpers.link_to_model # Verify that a model", "# Verify ``key`` access version of custom text output =", "output = secondary_helper(data) self.assertEqual(output, (data, \"Arg data\")) def test_itemgetter(self): \"\"\"", "self.assertEqual(output, data[:5] + \"...\") # Verify ellipsis can be customized", "if get_version().split('.') < ['1', '7']: test_data_fixture = 'test_data_legacy.json' else: test_data_fixture", "from the call with self.assertRaises(ValueError) as cm: tertiary_helper(data, **internals) self.assertEqual(str(cm.exception),", "attrgetter works. 
\"\"\" helper = helpers.attrgetter # Verify simple attr", "# Verify simple use data = 1234567890 secondary_helper = helper(\"{0:,}\")", "complaint related = RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError) as cm: helper(related) self.assertEqual(str(cm.exception),", "helpers.format # Verify simple use data = 1234567890 secondary_helper =", "no attribute 'get_absolute_url'\") # Verify simple use instance = ExampleModel.objects.get(pk=1)", "= helper(target_function, arg=\"Arg data\") output = secondary_helper(data) self.assertEqual(output, (data, \"Arg", "\"\"\" helper = helpers.through_filter target_function = lambda data, arg=None: (data,", "in a callback context internals = {'field_name': 'name'} # Verify", "helper(slice(1, 3)) output = secondary_helper(data) self.assertEqual(output, data[1:3]) # Verify ellipsis", "# Verify default kwarg names end up as attributes data", "self.assertEqual(output, data[:5] + \"custom\") # Verify ellipsis does nothing for", "partial) self.assertEqual(type(tertiary_helper), partial) # Verify chain ends with provision of", "that make_xeditable works. \"\"\" helper = helpers.make_xeditable # Items that", "self.assertRaises(AttributeError) as cm: output = secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel' object has", "works. \"\"\" helper = helpers.itemgetter # Verify simple index access", "not in anticipated to appear in rendered string 'special': \"SPECIAL", "href=\"#1\">Special text</a>') # Verify ``key`` access to transition an instance", "are allowed # to be null. 
output = secondary_helper(None) self.assertEqual(output,", "Verify that explicit additions via ``extra_attrs`` allows kwargs to appear", "= { 'type': 'textarea' } kwargs2 = { 'other_arg': True", "to test successful use output = tertiary_helper(data, url=\"/\", **internals) self.assertTrue(isinstance(output,", "access version of custom text output = secondary_helper(instance, text=\"Special text\")", "def test_link_to_model(self): \"\"\" Verifies that link_to_model works. \"\"\" helper =", "kwargs2 = { 'other_arg': True } secondary_helper = helper(**kwargs1) expected_kwargs", "works. \"\"\" helper = helpers.attrgetter # Verify simple attr lookup", "helper(related) self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object has no attribute 'get_absolute_url'\") # Verify", "# Verify ``arg`` argument secondary_helper = helper(target_function, arg=\"Arg data\") output", "helper = helpers.make_xeditable # Items that the helper normally expects", "helper = helpers.format_date # Verify simple use data = datetime.now()", "Verify ellipsis does nothing for non-string data types data =", "\"\"\" Verifies that attrgetter works. 
\"\"\" helper = helpers.attrgetter #", "fixtures = [test_data_fixture] def test_link_to_model(self): \"\"\" Verifies that link_to_model works.", "helpers.attrgetter # Verify simple attr lookup data = ExampleModel.objects.get(pk=1) secondary_helper", "# Verify simple use data = \"Data string\" secondary_helper =", "# Verify that no \"view\" kwarg means the url is", "extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs) # Verify default kwarg names end up", "url=\"/\", **internals) self.assertTrue(isinstance(output, six.string_types)) # Verify that no \"view\" kwarg", "Verify text override output = helper(instance, text=\"Special text\") self.assertEqual(output, '<a", "text output = secondary_helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>')", "= helpers.make_xeditable # Items that the helper normally expects in", "trigger rendering secondary_helper = helper() tertiary_helper = secondary_helper() self.assertEqual(type(secondary_helper), partial)", "no \"view\" kwarg means the url is required from the", "helpers.make_boolean_checkmark # Verify simple use output = helper(\"True-ish value\") self.assertEqual(output,", "\"...\") # Verify ellipsis can be customized secondary_helper = helper(slice(0,", "helper = helpers.attrgetter # Verify simple attr lookup data =", "for models.DateTimeField that are allowed # to be null. 
output", "Verify ellipsis can be customized secondary_helper = helper(slice(0, 5), ellipsis=\"custom\")", "kwarg names end up as attributes data = ExampleModel.objects.get(pk=1) kwargs", "data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\"", "self.assertEqual(output, '<a href=\"#1\">ExampleModel 1</a>') # Verify text override output =", "helper(target_function) output = secondary_helper(data) self.assertEqual(output, (data, None)) # Verify ``arg``", "data[1:3]) # Verify ellipsis works for strings data = str(range(10))", "as cm: tertiary_helper(data, **internals) self.assertEqual(str(cm.exception), \"'make_xeditable' cannot determine a value", "data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\"", "secondary_helper = helper(**kwargs) output = secondary_helper(data, **internals) expected_output = \"\"\"", "# Verify simple attr lookup data = ExampleModel.objects.get(pk=1) secondary_helper =", "arg because we want to test successful use output =", "# This helps promise that the helper won't blow up", "secondary_helper = helper(-1) output = secondary_helper(data) self.assertEqual(output, data[-1]) # Verify", "\"\"\" Verifies that format works. 
\"\"\" helper = helpers.format #", "= helper('pk') output = secondary_helper(data) self.assertEqual(output, data.pk) # Verify bad", "additions via ``extra_attrs`` allows kwargs to appear in HTML as", "helper(\"{0:,}\") output = secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data)) # Verify ``cast`` argument", "\"URL DATA\", 'source': \"SOURCE DATA\", 'title': \"TITLE DATA\", 'placeholder': \"PLACEHOLDER", "= helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') # Verify", "instance = ExampleModel.objects.get(pk=1) output = helper(instance) self.assertEqual(output, '<a href=\"#1\">ExampleModel 1</a>')", "helpers.make_xeditable # Items that the helper normally expects in a", "DATA\", 'data_custom': \"DATA-CUSTOM DATA\", } secondary_helper = helper(**kwargs) output =", "import six from .testcase import DatatableViewTestCase from .test_app.models import ExampleModel,", "self.assertEqual(str(cm.exception), \"'make_xeditable' cannot determine a value for 'url'.\") # Verify", "test successful use output = tertiary_helper(data, url=\"/\", **internals) self.assertTrue(isinstance(output, six.string_types))", "data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-special=\"SPECIAL", "attribute 'bad field name'\") def test_make_xeditable(self): \"\"\" Verifies that make_xeditable", "data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM", "name'\") def test_make_xeditable(self): \"\"\" Verifies that make_xeditable works. \"\"\" helper", "DATA\", 'placeholder': \"PLACEHOLDER DATA\", # Extra stuff not in anticipated", "attr lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('pk') output =", "get swallowed without complaint. # This helps promise that the", "format works. 
\"\"\" helper = helpers.format # Verify simple use", "``key`` access version of custom text output = secondary_helper(instance, text=\"Special", "a value data = ExampleModel.objects.get(pk=1) # This needs a \"url\"", "in rendered string 'special': \"SPECIAL DATA\", 'data_custom': \"DATA-CUSTOM DATA\", }", "secondary_helper = helper('pk') output = secondary_helper(data) self.assertEqual(output, data.pk) # Verify", "data[:5] + \"...\") # Verify ellipsis can be customized secondary_helper", "'<a href=\"#1\">ExampleModel 1</a>') # Verify text override output = helper(instance,", "Verify kwargs accumulate kwargs1 = { 'type': 'textarea' } kwargs2", "arg=\"Arg data\") output = secondary_helper(data) self.assertEqual(output, (data, \"Arg data\")) def", "Verify simple attr lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('pk')", "= helper(key=lambda o: o.related) output = secondary_helper(instance) self.assertEqual(output, '<a href=\"#1\">RelatedModel", "kwarg means the url is required from the call with", "value\") self.assertEqual(output, '&#10004;') output = helper(\"\") self.assertEqual(output, '&#10008;') # Verify", "= RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError) as cm: helper(related) self.assertEqual(str(cm.exception), \"'RelatedM2MModel' object", "\"'make_xeditable' cannot determine a value for 'url'.\") # Verify kwargs", "# Verify bad attribrute lookup data = ExampleModel.objects.get(pk=1) secondary_helper =", "= helper(-1) output = secondary_helper(data) self.assertEqual(output, data[-1]) # Verify slicing", "= 1234567890 secondary_helper = helper(\"{0:,}\") output = secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data))", "range(10) output = secondary_helper(data) self.assertEqual(output, data[:5]) def test_attrgetter(self): \"\"\" Verifies", "This needs a \"url\" arg because we want to test", "arg) # Verify simple use data = \"Data string\" secondary_helper", "Verify that None objects get 
swallowed without complaint. # This", "attributes data = ExampleModel.objects.get(pk=1) kwargs = { 'pk': \"PK DATA\",", "# \"data-*\" attributes. secondary_helper = helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs) output", "= helper(slice(0, 5), ellipsis=\"custom\") output = secondary_helper(data) self.assertEqual(output, data[:5] +", "value for 'url'.\") # Verify kwargs accumulate kwargs1 = {", "ExampleModel.objects.get(pk=1) secondary_helper = helper('bad field name') with self.assertRaises(AttributeError) as cm:", "that the helper normally expects in a callback context internals", "'bad field name'\") def test_make_xeditable(self): \"\"\" Verifies that make_xeditable works.", "use data = 1234567890 secondary_helper = helper(\"{0:,}\") output = secondary_helper(data)", "'data_custom': \"DATA-CUSTOM DATA\", } secondary_helper = helper(**kwargs) output = secondary_helper(data,", "= ExampleModel.objects.get(pk=1) output = helper(instance) self.assertEqual(output, '<a href=\"#1\">ExampleModel 1</a>') #", "use instance = ExampleModel.objects.get(pk=1) output = helper(instance) self.assertEqual(output, '<a href=\"#1\">ExampleModel", "self.assertTrue(isinstance(output, six.string_types)) # Verify that no \"view\" kwarg means the", "test_format(self): \"\"\" Verifies that format works. \"\"\" helper = helpers.format", "[test_data_fixture] def test_link_to_model(self): \"\"\" Verifies that link_to_model works. 
\"\"\" helper", "= secondary_helper(data) self.assertEqual(output, data[1:3]) # Verify ellipsis works for strings", "Verify simple use instance = ExampleModel.objects.get(pk=1) output = helper(instance) self.assertEqual(output,", "because we want to test successful use output = tertiary_helper(data,", "'pk': \"PK DATA\", 'type': \"TYPE DATA\", 'url': \"URL DATA\", 'source':", "related = RelatedM2MModel.objects.get(pk=1) with self.assertRaises(AttributeError) as cm: helper(related) self.assertEqual(str(cm.exception), \"'RelatedM2MModel'", "self.assertEqual(output, (data, None)) # Verify ``arg`` argument secondary_helper = helper(target_function,", "\"\"\" Verifies that format_date works. \"\"\" helper = helpers.format_date #", "# Verify custom values output = helper(\"True-ish value\", true_value=\"Yes\", false_value=\"No\")", "# Items that the helper normally expects in a callback", "version of custom text output = secondary_helper(instance, text=\"Special text\") self.assertEqual(output,", "utf-8 -*- from datetime import datetime from functools import partial", "= helper(slice(1, 3)) output = secondary_helper(data) self.assertEqual(output, data[1:3]) # Verify", "data-title=\"TITLE DATA\" data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel 1", "secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel' object has no attribute 'bad field name'\")", "value\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'Yes') output = helper(\"\", true_value=\"Yes\", false_value=\"No\")", "internals = {'field_name': 'name'} # Verify chain calls don't trigger", "output = secondary_helper(None) self.assertEqual(output, \"\") def test_format(self): \"\"\" Verifies that", "get_version().split('.') < ['1', '7']: test_data_fixture = 'test_data_legacy.json' else: test_data_fixture =", "= 'test_data_legacy.json' else: test_data_fixture = 'test_data.json' class HelpersTests(DatatableViewTestCase): 
fixtures =", "simple use data = \"Data string\" secondary_helper = helper(target_function) output", "def test_itemgetter(self): \"\"\" Verifies that itemgetter works. \"\"\" helper =", "'<a href=\"#1\">RelatedModel object</a>') # Verify ``key`` access version of custom", "Verifies that make_boolean_checkmark works. \"\"\" helper = helpers.make_boolean_checkmark # Verify", "secondary_helper(None) self.assertEqual(output, \"\") def test_format(self): \"\"\" Verifies that format works.", "output = secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data))) def test_through_filter(self): \"\"\" Verifies that", "self.assertEqual(type(secondary_helper), partial) self.assertEqual(type(tertiary_helper), partial) # Verify chain ends with provision", "test_link_to_model(self): \"\"\" Verifies that link_to_model works. \"\"\" helper = helpers.link_to_model", "'&#10008;') # Verify custom values output = helper(\"True-ish value\", true_value=\"Yes\",", "url is required from the call with self.assertRaises(ValueError) as cm:", "o: o.related) output = secondary_helper(instance) self.assertEqual(output, '<a href=\"#1\">RelatedModel object</a>') #", "Verify ``arg`` argument secondary_helper = helper(target_function, arg=\"Arg data\") output =", "# Verify slicing access secondary_helper = helper(slice(1, 3)) output =", "secondary_helper = helper(slice(0, 5), ellipsis=\"custom\") output = secondary_helper(data) self.assertEqual(output, data[:5]", "Verify that a model without get_absolute_url() raises a complaint related", "secondary_helper = helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs) output = secondary_helper(data, **internals)", "= {'field_name': 'name'} # Verify chain calls don't trigger rendering", "= helper(\"{0:,}\") output = secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data)) # Verify ``cast``", "self.assertEqual(output, \"{0:.2f}\".format(float(data))) def test_through_filter(self): \"\"\" Verifies that 
through_filter works. \"\"\"", "= secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data))) def test_through_filter(self): \"\"\" Verifies that through_filter", "DATA\", 'type': \"TYPE DATA\", 'url': \"URL DATA\", 'source': \"SOURCE DATA\",", "anticipated to appear in rendered string 'special': \"SPECIAL DATA\", 'data_custom':", "-*- from datetime import datetime from functools import partial from", "data types data = range(10) output = secondary_helper(data) self.assertEqual(output, data[:5])", "**internals) expected_output = \"\"\" <a href=\"#\" data-name=\"name\" data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER", "\"'RelatedM2MModel' object has no attribute 'get_absolute_url'\") # Verify simple use", "text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') # Verify ``key`` access to", "def test_make_boolean_checkmark(self): \"\"\" Verifies that make_boolean_checkmark works. \"\"\" helper =", "= ExampleModel.objects.get(pk=1) secondary_helper = helper('pk') output = secondary_helper(data) self.assertEqual(output, data.pk)", "self.assertRaises(ValueError) as cm: tertiary_helper(data, **internals) self.assertEqual(str(cm.exception), \"'make_xeditable' cannot determine a", "Verify simple use data = \"Data string\" secondary_helper = helper(target_function)", "secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data))) def test_through_filter(self): \"\"\" Verifies that through_filter works.", "object has no attribute 'get_absolute_url'\") # Verify simple use instance", "promise that the helper won't blow up for models.DateTimeField that", "= dict(kwargs1, extra_attrs=[]) self.assertEqual(secondary_helper.keywords, expected_kwargs) tertiary_helper = secondary_helper(**kwargs2) expected_kwargs =", "secondary_helper(instance) self.assertEqual(output, '<a href=\"#1\">RelatedModel object</a>') # Verify ``key`` access version", "that None objects get swallowed without complaint. 
# This helps", "Verifies that format works. \"\"\" helper = helpers.format # Verify", "string\" secondary_helper = helper(target_function) output = secondary_helper(data) self.assertEqual(output, (data, None))", "secondary_helper = helper(slice(0, 5), ellipsis=True) output = secondary_helper(data) self.assertEqual(output, data[:5]", "# -*- encoding: utf-8 -*- from datetime import datetime from", "helpers.itemgetter # Verify simple index access data = list(range(5)) secondary_helper", "= helpers.itemgetter # Verify simple index access data = list(range(5))", "= \"Data string\" secondary_helper = helper(target_function) output = secondary_helper(data) self.assertEqual(output,", "secondary_helper(data) self.assertEqual(output, data[:5]) def test_attrgetter(self): \"\"\" Verifies that attrgetter works.", "output = helper(\"\", true_value=\"Yes\", false_value=\"No\") self.assertEqual(output, 'No') def test_format_date(self): \"\"\"", "simple attr lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('pk') output", "kwargs = { 'pk': \"PK DATA\", 'type': \"TYPE DATA\", 'url':", "\"data-*\" attributes. secondary_helper = helper(extra_attrs=['special', 'data_custom', 'fake'], **kwargs) output =", "# to be null. output = secondary_helper(None) self.assertEqual(output, \"\") def", "+ \"...\") # Verify ellipsis can be customized secondary_helper =", "DatatableViewTestCase from .test_app.models import ExampleModel, RelatedM2MModel if get_version().split('.') < ['1',", "Verifies that link_to_model works. \"\"\" helper = helpers.link_to_model # Verify", "\"\"\" self.assertHTMLEqual(output, expected_output) # Verify that explicit additions via ``extra_attrs``", "partial from django import get_version from datatableview import helpers import", "test_format_date(self): \"\"\" Verifies that format_date works. 
\"\"\" helper = helpers.format_date", "provision of a value data = ExampleModel.objects.get(pk=1) # This needs", "secondary_helper = helper(\"{0:.2f}\", cast=float) output = secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data))) def", "'type': \"TYPE DATA\", 'url': \"URL DATA\", 'source': \"SOURCE DATA\", 'title':", "dict(kwargs1, **dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs) # Verify default kwarg names", "DATA\" data-url=\"URL DATA\" data-value=\"1\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output,", "output = secondary_helper(data) self.assertEqual(output, data.pk) # Verify bad attribrute lookup", "data = list(range(5)) secondary_helper = helper(-1) output = secondary_helper(data) self.assertEqual(output,", "+ \"custom\") # Verify ellipsis does nothing for non-string data", "This helps promise that the helper won't blow up for", "\"\"\" Verifies that make_xeditable works. \"\"\" helper = helpers.make_xeditable #", "**internals) self.assertEqual(str(cm.exception), \"'make_xeditable' cannot determine a value for 'url'.\") #", "(data, None)) # Verify ``arg`` argument secondary_helper = helper(target_function, arg=\"Arg", "field name'\") def test_make_xeditable(self): \"\"\" Verifies that make_xeditable works. 
\"\"\"", "data = 1234567890 secondary_helper = helper(\"{0:,}\") output = secondary_helper(data) self.assertEqual(output,", "ExampleModel.objects.get(pk=2) secondary_helper = helper(key=lambda o: o.related) output = secondary_helper(instance) self.assertEqual(output,", "} kwargs2 = { 'other_arg': True } secondary_helper = helper(**kwargs1)", "# Verify simple use output = helper(\"True-ish value\") self.assertEqual(output, '&#10004;')", "= helpers.through_filter target_function = lambda data, arg=None: (data, arg) #", "else: test_data_fixture = 'test_data.json' class HelpersTests(DatatableViewTestCase): fixtures = [test_data_fixture] def", "lambda data, arg=None: (data, arg) # Verify simple use data", "ellipsis can be customized secondary_helper = helper(slice(0, 5), ellipsis=\"custom\") output", "DATA\" data-url=\"URL DATA\" data-value=\"1\" data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM DATA\" data-xeditable=\"xeditable\"> ExampleModel", "self.assertEqual(output, (data, \"Arg data\")) def test_itemgetter(self): \"\"\" Verifies that itemgetter", "import get_version from datatableview import helpers import six from .testcase", "to transition an instance to a related field instance =", "data-type=\"TYPE DATA\" data-url=\"URL DATA\" data-value=\"1\" data-special=\"SPECIAL DATA\" data-custom=\"DATA-CUSTOM DATA\" data-xeditable=\"xeditable\">", "data[:5] + \"custom\") # Verify ellipsis does nothing for non-string", "secondary_helper = helper(\"{0:,}\") output = secondary_helper(data) self.assertEqual(output, \"{0:,}\".format(data)) # Verify", "text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') # Verify ``key`` access", "that format_date works. 
\"\"\" helper = helpers.format_date # Verify simple", "# Verify simple use data = datetime.now() secondary_helper = helper(\"%m/%d/%Y\")", "data = ExampleModel.objects.get(pk=1) kwargs = { 'pk': \"PK DATA\", 'type':", "helper(instance) self.assertEqual(output, '<a href=\"#1\">ExampleModel 1</a>') # Verify text override output", "cm: output = secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel' object has no attribute", "that itemgetter works. \"\"\" helper = helpers.itemgetter # Verify simple", "lookup data = ExampleModel.objects.get(pk=1) secondary_helper = helper('bad field name') with", "ExampleModel, RelatedM2MModel if get_version().split('.') < ['1', '7']: test_data_fixture = 'test_data_legacy.json'", "'special': \"SPECIAL DATA\", 'data_custom': \"DATA-CUSTOM DATA\", } secondary_helper = helper(**kwargs)", "**internals) self.assertTrue(isinstance(output, six.string_types)) # Verify that no \"view\" kwarg means", "def test_attrgetter(self): \"\"\" Verifies that attrgetter works. \"\"\" helper =", "**dict(kwargs2, extra_attrs=[])) self.assertEqual(tertiary_helper.keywords, expected_kwargs) # Verify default kwarg names end", "\"\") def test_format(self): \"\"\" Verifies that format works. \"\"\" helper", "non-string data types data = range(10) output = secondary_helper(data) self.assertEqual(output,", "output = secondary_helper(data) self.assertEqual(output, data[:5] + \"custom\") # Verify ellipsis", "that a model without get_absolute_url() raises a complaint related =", "works. 
\"\"\" helper = helpers.make_boolean_checkmark # Verify simple use output", "<filename>datatableview/tests/test_helpers.py # -*- encoding: utf-8 -*- from datetime import datetime", "output = secondary_helper(data) self.assertEqual(str(cm.exception), \"'ExampleModel' object has no attribute 'bad", "text</a>') # Verify ``key`` access to transition an instance to", "of custom text output = secondary_helper(instance, text=\"Special text\") self.assertEqual(output, '<a", "access secondary_helper = helper(slice(1, 3)) output = secondary_helper(data) self.assertEqual(output, data[1:3])", "a callback context internals = {'field_name': 'name'} # Verify chain", "nothing for non-string data types data = range(10) output =", "\"PLACEHOLDER DATA\", # Extra stuff not in anticipated to appear", "DATA\" data-custom=\"DATA-CUSTOM DATA\" data-xeditable=\"xeditable\"> ExampleModel 1 </a> \"\"\" self.assertHTMLEqual(output, expected_output)", "no attribute 'bad field name'\") def test_make_xeditable(self): \"\"\" Verifies that", "value data = ExampleModel.objects.get(pk=1) # This needs a \"url\" arg", "up for models.DateTimeField that are allowed # to be null.", "secondary_helper(data) self.assertEqual(output, data[:5] + \"...\") # Verify ellipsis can be", "output = helper(instance, text=\"Special text\") self.assertEqual(output, '<a href=\"#1\">Special text</a>') #", "chain calls don't trigger rendering secondary_helper = helper() tertiary_helper =", "\"1234.56789\" secondary_helper = helper(\"{0:.2f}\", cast=float) output = secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data)))", "secondary_helper(data) self.assertEqual(output, data.strftime(\"%m/%d/%Y\")) # Verify that None objects get swallowed", "null. output = secondary_helper(None) self.assertEqual(output, \"\") def test_format(self): \"\"\" Verifies", "Verifies that make_xeditable works. 
\"\"\" helper = helpers.make_xeditable # Items", "= helper(\"True-ish value\") self.assertEqual(output, '&#10004;') output = helper(\"\") self.assertEqual(output, '&#10008;')", "= helper(\"{0:.2f}\", cast=float) output = secondary_helper(data) self.assertEqual(output, \"{0:.2f}\".format(float(data))) def test_through_filter(self):", "make_boolean_checkmark works. \"\"\" helper = helpers.make_boolean_checkmark # Verify simple use", "(data, \"Arg data\")) def test_itemgetter(self): \"\"\" Verifies that itemgetter works.", "Verify simple index access data = list(range(5)) secondary_helper = helper(-1)", "data[-1]) # Verify slicing access secondary_helper = helper(slice(1, 3)) output", "as attributes data = ExampleModel.objects.get(pk=1) kwargs = { 'pk': \"PK", "\"\"\" helper = helpers.format # Verify simple use data =", "access data = list(range(5)) secondary_helper = helper(-1) output = secondary_helper(data)", "helpers.format_date # Verify simple use data = datetime.now() secondary_helper =", "accumulate kwargs1 = { 'type': 'textarea' } kwargs2 = {", "None objects get swallowed without complaint. # This helps promise", "href=\"#\" data-name=\"name\" data-pk=\"PK DATA\" data-placeholder=\"PLACEHOLDER DATA\" data-source=\"SOURCE DATA\" data-title=\"TITLE DATA\"", "a \"url\" arg because we want to test successful use", "default kwarg names end up as attributes data = ExampleModel.objects.get(pk=1)" ]
[ "traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) # 起動時のメッセージの関数 async def ready_greet(): channel =", "async def giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3 流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3", "@bot.command() async def inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') #", "ギラティナのチャンネルのID GIRATINA_CHANNEL_ID = 940610524415144036 WIP_CHANNEL_ID = 940966825087361025 @bot.event async def", "<reponame>naari3/seibaribot import traceback from os import getenv import discord from", "async def ready_greet(): channel = bot.get_channel(GIRATINA_CHANNEL_ID) await channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数", "message.attachments and message.channel.id == WIP_CHANNEL_ID: for attachment in message.attachments: #", "None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return # ボイスチャンネルに接続する await ctx.author.voice.channel.connect() # 音声を再生する", "await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return # ボイスチャンネルに接続する await ctx.author.voice.channel.connect() # 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3'))", "discord import Message from discord.ext import commands from discord.ext.commands import", "ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が ドナルド だった場合 if 'ドナルド' in", "チーバくんの、なのはな体操 @bot.command() async def chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信 @bot.command()", "async def chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信 @bot.command() async def", "940610524415144036 WIP_CHANNEL_ID = 940966825087361025 @bot.event async def on_command_error(ctx, error): orig_error", "def inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' 
f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る @bot.command()", "@bot.command() async def giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3 流し逃げ -", "import asyncio client = discord.Client() # botの接頭辞を!にする bot = commands.Bot(command_prefix='!')", "ctx.author.voice.channel.connect() # 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する while ctx.guild.voice_client.is_playing(): await sleep(1)", "on_ready(): await ready_greet() # ピンポン @bot.command() async def ping(ctx): await", "discord.ext.commands import Context from asyncio import sleep import asyncio client", "ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する while ctx.guild.voice_client.is_playing(): await sleep(1) # 切断する await", "commands.Bot(command_prefix='!') # ギラティナのチャンネルのID GIRATINA_CHANNEL_ID = 940610524415144036 WIP_CHANNEL_ID = 940966825087361025 @bot.event", "channel = bot.get_channel(GIRATINA_CHANNEL_ID) await channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数 @bot.event async def", "input.jpg -i input.mp3 -vcodec libx264 -vb 50k -acodec aac -strict", "sleep import asyncio client = discord.Client() # botの接頭辞を!にする bot =", "is None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return # ボイスチャンネルに接続する await ctx.author.voice.channel.connect() #", "await proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message) # チーバくんの、なのはな体操 @bot.command() async", "音声が再生中か確認する while ctx.guild.voice_client.is_playing(): await sleep(1) # 切断する await ctx.guild.voice_client.disconnect() token", "ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信 @bot.command() async def kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') #", "await 
ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3 流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async def", "ギラティナの画像を送る @bot.command() async def giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3 流し逃げ", "*command.split(\" \"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout, stderr = await proc.communicate() await", "commands from discord.ext.commands import Context from asyncio import sleep import", "message.author.bot: return # ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が ドナルド だった場合", "ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る @bot.command() async def giratina(ctx):", "inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る @bot.command() async", "\"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout, stderr = await proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\"))", "# 音声が再生中か確認する while ctx.guild.voice_client.is_playing(): await sleep(1) # 切断する await ctx.guild.voice_client.disconnect()", "getattr(error, 'original', error) error_msg = ''.join( traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) #", "WIP_CHANNEL_ID: for attachment in message.attachments: # Attachmentの拡張子がmp3, wavのどれかだった場合 # https://discordpy.readthedocs.io/ja/latest/api.html#attachment", "# Bot起動時に実行される関数 @bot.event async def on_ready(): await ready_greet() # ピンポン", "送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る await 
message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合 if message.attachments and", "yuv420p -shortest output.mp4\" proc = await asyncio.create_subprocess_exec( *command.split(\" \"), stdout=asyncio.subprocess.PIPE,", "bokuseku.mp3 流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async def bokuseku(ctx): if ctx.author.voice", "- https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async def bokuseku(ctx): if ctx.author.voice is None:", "https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async def bokuseku(ctx): if ctx.author.voice is None: await", "2 -ar 48000 -pix_fmt yuv420p -shortest output.mp4\" proc = await", "stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout, stderr = await proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\")) await", "WIP_CHANNEL_ID = 940966825087361025 @bot.event async def on_command_error(ctx, error): orig_error =", "# ギラティナの画像を送る @bot.command() async def giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3", "ピンポン @bot.command() async def ping(ctx): await ctx.send('pong') @bot.event async def", "音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する while ctx.guild.voice_client.is_playing(): await sleep(1) # 切断する", "-ar 48000 -pix_fmt yuv420p -shortest output.mp4\" proc = await asyncio.create_subprocess_exec(", "await channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数 @bot.event async def on_ready(): await ready_greet()", "メッセージが送られてきたチャンネルに送る await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合 if message.attachments and message.channel.id ==", "def giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3 流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 
@bot.command()", "if attachment.content_type and \"audio\" in attachment.content_type: await attachment.save(\"input.mp3\") command =", "'、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る @bot.command() async def giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') #", "@bot.command() async def kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス @bot.command() async", "wavのどれかだった場合 # https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type and \"audio\" in attachment.content_type: await", "input.mp3 -vcodec libx264 -vb 50k -acodec aac -strict experimental -ab", "= 940966825087361025 @bot.event async def on_command_error(ctx, error): orig_error = getattr(error,", "メッセージの本文が ドナルド だった場合 if 'ドナルド' in str(message.content): # 送信するメッセージをランダムで決める #", "@bot.command() async def chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信 @bot.command() async", "while ctx.guild.voice_client.is_playing(): await sleep(1) # 切断する await ctx.guild.voice_client.disconnect() token =", "= \"ffmpeg -y -loop 1 -i input.jpg -i input.mp3 -vcodec", "attachment in message.attachments: # Attachmentの拡張子がmp3, wavのどれかだった場合 # https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type", "command = \"ffmpeg -y -loop 1 -i input.jpg -i input.mp3", "async def inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る", "# 送信者がBotである場合は弾く if message.author.bot: return # ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699 #", "50k -acodec aac -strict experimental -ab 128k -ac 2 -ar", "message.channel.id == WIP_CHANNEL_ID: for attachment in message.attachments: # Attachmentの拡張子がmp3, wavのどれかだった場合", "# botの接頭辞を!にする bot = 
commands.Bot(command_prefix='!') # ギラティナのチャンネルのID GIRATINA_CHANNEL_ID = 940610524415144036", "await ctx.send(error_msg) # 起動時のメッセージの関数 async def ready_greet(): channel = bot.get_channel(GIRATINA_CHANNEL_ID)", "'ドナルド' in str(message.content): # 送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') #", "-vb 50k -acodec aac -strict experimental -ab 128k -ac 2", "await sleep(1) # 切断する await ctx.guild.voice_client.disconnect() token = getenv('DISCORD_BOT_TOKEN') bot.run(token)", "# Attachmentの拡張子がmp3, wavのどれかだった場合 # https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type and \"audio\" in", "await ready_greet() # ピンポン @bot.command() async def ping(ctx): await ctx.send('pong')", "# 送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合 if message.attachments", "@bot.event async def on_message(message): # 送信者がBotである場合は弾く if message.author.bot: return #", "str(message.content): # 送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合 if", "-pix_fmt yuv420p -shortest output.mp4\" proc = await asyncio.create_subprocess_exec( *command.split(\" \"),", "1 -i input.jpg -i input.mp3 -vcodec libx264 -vb 50k -acodec", "and message.channel.id == WIP_CHANNEL_ID: for attachment in message.attachments: # Attachmentの拡張子がmp3,", "# かおすちゃんを送信 @bot.command() async def kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス", "Message from discord.ext import commands from discord.ext.commands import Context from", "from discord.ext import commands from discord.ext.commands import Context from asyncio", "起動時のメッセージの関数 async def ready_greet(): channel = bot.get_channel(GIRATINA_CHANNEL_ID) await 
channel.send('ギラティナ、オォン!') #", "async def bokuseku(ctx): if ctx.author.voice is None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return", "os import getenv import discord from discord import Message from", "GIRATINA_CHANNEL_ID = 940610524415144036 WIP_CHANNEL_ID = 940966825087361025 @bot.event async def on_command_error(ctx,", "await asyncio.create_subprocess_exec( *command.split(\" \"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout, stderr = await", "f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る @bot.command() async def giratina(ctx): await", "# チーバくんの、なのはな体操 @bot.command() async def chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信", "ボイスチャンネルに接続する await ctx.author.voice.channel.connect() # 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する while ctx.guild.voice_client.is_playing():", "bokuseku(ctx): if ctx.author.voice is None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return # ボイスチャンネルに接続する", "error): orig_error = getattr(error, 'original', error) error_msg = ''.join( traceback.TracebackException.from_exception(orig_error).format())", "error_msg = ''.join( traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) # 起動時のメッセージの関数 async def", "ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス @bot.command() async def inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア", "'original', error) error_msg = ''.join( traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) # 起動時のメッセージの関数", "-acodec aac -strict experimental -ab 128k -ac 2 -ar 48000", "async def on_message(message): # 送信者がBotである場合は弾く if message.author.bot: return # ドナルドの言葉狩り", "-ab 128k -ac 2 -ar 48000 -pix_fmt yuv420p -shortest output.mp4\"", 
"-ac 2 -ar 48000 -pix_fmt yuv420p -shortest output.mp4\" proc =", "import Context from asyncio import sleep import asyncio client =", "proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message) # チーバくんの、なのはな体操 @bot.command() async def", "getenv import discord from discord import Message from discord.ext import", "流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async def bokuseku(ctx): if ctx.author.voice is", "await message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message) # チーバくんの、なのはな体操 @bot.command() async def chiibakun(ctx):", "await attachment.save(\"input.mp3\") command = \"ffmpeg -y -loop 1 -i input.jpg", "in str(message.content): # 送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合", "-shortest output.mp4\" proc = await asyncio.create_subprocess_exec( *command.split(\" \"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)", "return # ボイスチャンネルに接続する await ctx.author.voice.channel.connect() # 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する", "asyncio.create_subprocess_exec( *command.split(\" \"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout, stderr = await proc.communicate()", "''.join( traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) # 起動時のメッセージの関数 async def ready_greet(): channel", "= commands.Bot(command_prefix='!') # ギラティナのチャンネルのID GIRATINA_CHANNEL_ID = 940610524415144036 WIP_CHANNEL_ID = 940966825087361025", "Attachmentの拡張子がmp3, wavのどれかだった場合 # https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type and \"audio\" in attachment.content_type:", "ctx.author.voice is None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return # ボイスチャンネルに接続する await 
ctx.author.voice.channel.connect()", "and \"audio\" in attachment.content_type: await attachment.save(\"input.mp3\") command = \"ffmpeg -y", "# メッセージの本文が ドナルド だった場合 if 'ドナルド' in str(message.content): # 送信するメッセージをランダムで決める", "if 'ドナルド' in str(message.content): # 送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293')", "-i input.jpg -i input.mp3 -vcodec libx264 -vb 50k -acodec aac", "def on_ready(): await ready_greet() # ピンポン @bot.command() async def ping(ctx):", "Context from asyncio import sleep import asyncio client = discord.Client()", "= await asyncio.create_subprocess_exec( *command.split(\" \"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout, stderr =", "== WIP_CHANNEL_ID: for attachment in message.attachments: # Attachmentの拡張子がmp3, wavのどれかだった場合 #", "Bot起動時に実行される関数 @bot.event async def on_ready(): await ready_greet() # ピンポン @bot.command()", "ctx.guild.voice_client.is_playing(): await sleep(1) # 切断する await ctx.guild.voice_client.disconnect() token = getenv('DISCORD_BOT_TOKEN')", "-strict experimental -ab 128k -ac 2 -ar 48000 -pix_fmt yuv420p", "async def on_ready(): await ready_greet() # ピンポン @bot.command() async def", "async def on_command_error(ctx, error): orig_error = getattr(error, 'original', error) error_msg", "# ギラティナのチャンネルのID GIRATINA_CHANNEL_ID = 940610524415144036 WIP_CHANNEL_ID = 940966825087361025 @bot.event async", "だった場合 if 'ドナルド' in str(message.content): # 送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る await", "= bot.get_channel(GIRATINA_CHANNEL_ID) await channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数 @bot.event async def on_ready():", "-vcodec libx264 -vb 50k -acodec aac -strict experimental -ab 128k", "bot.process_commands(message) # チーバくんの、なのはな体操 @bot.command() async def chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') #", "= getattr(error, 'original', error) error_msg = ''.join( 
traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg)", "# イキス @bot.command() async def inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}'", "ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return # ボイスチャンネルに接続する await ctx.author.voice.channel.connect() # 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) #", "from os import getenv import discord from discord import Message", "in attachment.content_type: await attachment.save(\"input.mp3\") command = \"ffmpeg -y -loop 1", "chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信 @bot.command() async def kaosu(ctx): await", "message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message) # チーバくんの、なのはな体操 @bot.command() async def chiibakun(ctx): await", "ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3 流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async def bokuseku(ctx):", "# ピンポン @bot.command() async def ping(ctx): await ctx.send('pong') @bot.event async", "await ctx.author.voice.channel.connect() # 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する while ctx.guild.voice_client.is_playing(): await", "async def kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス @bot.command() async def", "48000 -pix_fmt yuv420p -shortest output.mp4\" proc = await asyncio.create_subprocess_exec( *command.split(\"", "return # ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が ドナルド だった場合 if", "discord.Client() # botの接頭辞を!にする bot = commands.Bot(command_prefix='!') # ギラティナのチャンネルのID GIRATINA_CHANNEL_ID =", "error) error_msg = ''.join( traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) # 
起動時のメッセージの関数 async", "from discord import Message from discord.ext import commands from discord.ext.commands", "ctx.send('pong') @bot.event async def on_message(message): # 送信者がBotである場合は弾く if message.author.bot: return", "@bot.event async def on_command_error(ctx, error): orig_error = getattr(error, 'original', error)", "proc = await asyncio.create_subprocess_exec( *command.split(\" \"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout, stderr", "# 起動時のメッセージの関数 async def ready_greet(): channel = bot.get_channel(GIRATINA_CHANNEL_ID) await channel.send('ギラティナ、オォン!')", "await bot.process_commands(message) # チーバくんの、なのはな体操 @bot.command() async def chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss')", "# メッセージが送られてきたチャンネルに送る await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合 if message.attachments and message.channel.id", "def on_message(message): # 送信者がBotである場合は弾く if message.author.bot: return # ドナルドの言葉狩り -", "discord from discord import Message from discord.ext import commands from", "import discord from discord import Message from discord.ext import commands", "\"audio\" in attachment.content_type: await attachment.save(\"input.mp3\") command = \"ffmpeg -y -loop", "attachment.content_type and \"audio\" in attachment.content_type: await attachment.save(\"input.mp3\") command = \"ffmpeg", "# ボイスチャンネルに接続する await ctx.author.voice.channel.connect() # 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する while", "def ready_greet(): channel = bot.get_channel(GIRATINA_CHANNEL_ID) await channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数 @bot.event", "= await proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message) # チーバくんの、なのはな体操 @bot.command()", "traceback from os import getenv import discord from discord import", "= 940610524415144036 WIP_CHANNEL_ID = 940966825087361025 
@bot.event async def on_command_error(ctx, error):", "# ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が ドナルド だった場合 if 'ドナルド'", "# メッセージに場合 if message.attachments and message.channel.id == WIP_CHANNEL_ID: for attachment", "ctx.send(error_msg) # 起動時のメッセージの関数 async def ready_greet(): channel = bot.get_channel(GIRATINA_CHANNEL_ID) await", "aac -strict experimental -ab 128k -ac 2 -ar 48000 -pix_fmt", "def chiibakun(ctx): await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信 @bot.command() async def kaosu(ctx):", "on_message(message): # 送信者がBotである場合は弾く if message.author.bot: return # ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699", "https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type and \"audio\" in attachment.content_type: await attachment.save(\"input.mp3\") command", "libx264 -vb 50k -acodec aac -strict experimental -ab 128k -ac", "experimental -ab 128k -ac 2 -ar 48000 -pix_fmt yuv420p -shortest", "if message.attachments and message.channel.id == WIP_CHANNEL_ID: for attachment in message.attachments:", "@bot.event async def on_ready(): await ready_greet() # ピンポン @bot.command() async", "message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合 if message.attachments and message.channel.id == WIP_CHANNEL_ID: for", "イキス @bot.command() async def inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」')", "\"ffmpeg -y -loop 1 -i input.jpg -i input.mp3 -vcodec libx264", "# 音声を再生する ctx.guild.voice_client.play(discord.FFmpegPCMAudio('bokuseku.mp3')) # 音声が再生中か確認する while ctx.guild.voice_client.is_playing(): await sleep(1) #", "@bot.command() async def bokuseku(ctx): if ctx.author.voice is None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい')", "attachment.content_type: 
await attachment.save(\"input.mp3\") command = \"ffmpeg -y -loop 1 -i", "await message.channel.send('https://tenor.com/view/ronald-mcdonald-insanity-ronald-mcdonald-gif-21974293') # メッセージに場合 if message.attachments and message.channel.id == WIP_CHANNEL_ID:", "-y -loop 1 -i input.jpg -i input.mp3 -vcodec libx264 -vb", "await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス @bot.command() async def inm(ctx): await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n'", "def kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス @bot.command() async def inm(ctx):", "bot = commands.Bot(command_prefix='!') # ギラティナのチャンネルのID GIRATINA_CHANNEL_ID = 940610524415144036 WIP_CHANNEL_ID =", "asyncio client = discord.Client() # botの接頭辞を!にする bot = commands.Bot(command_prefix='!') #", "from discord.ext.commands import Context from asyncio import sleep import asyncio", "output.mp4\" proc = await asyncio.create_subprocess_exec( *command.split(\" \"), stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE) stdout,", "channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数 @bot.event async def on_ready(): await ready_greet() #", "def ping(ctx): await ctx.send('pong') @bot.event async def on_message(message): # 送信者がBotである場合は弾く", "if message.author.bot: return # ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が ドナルド", "bot.get_channel(GIRATINA_CHANNEL_ID) await channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数 @bot.event async def on_ready(): await", "asyncio import sleep import asyncio client = discord.Client() # botの接頭辞を!にする", "await ctx.send('pong') @bot.event async def on_message(message): # 送信者がBotである場合は弾く if message.author.bot:", "import traceback from os import getenv import discord from discord", "discord.ext import commands from discord.ext.commands import Context from asyncio import", "message.attachments: # Attachmentの拡張子がmp3, wavのどれかだった場合 # 
https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type and \"audio\"", "in message.attachments: # Attachmentの拡張子がmp3, wavのどれかだった場合 # https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type and", "128k -ac 2 -ar 48000 -pix_fmt yuv420p -shortest output.mp4\" proc", "kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス @bot.command() async def inm(ctx): await", "stdout, stderr = await proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message) #", "stderr=asyncio.subprocess.PIPE) stdout, stderr = await proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message)", "940966825087361025 @bot.event async def on_command_error(ctx, error): orig_error = getattr(error, 'original',", "送信者がBotである場合は弾く if message.author.bot: return # ドナルドの言葉狩り - https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が", "# bokuseku.mp3 流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async def bokuseku(ctx): if", "ドナルド だった場合 if 'ドナルド' in str(message.content): # 送信するメッセージをランダムで決める # メッセージが送られてきたチャンネルに送る", "= ''.join( traceback.TracebackException.from_exception(orig_error).format()) await ctx.send(error_msg) # 起動時のメッセージの関数 async def ready_greet():", "import commands from discord.ext.commands import Context from asyncio import sleep", "stderr = await proc.communicate() await message.channel.send(file=discord.File(\"output.mp4\")) await bot.process_commands(message) # チーバくんの、なのはな体操", "botの接頭辞を!にする bot = commands.Bot(command_prefix='!') # ギラティナのチャンネルのID GIRATINA_CHANNEL_ID = 940610524415144036 WIP_CHANNEL_ID", "def on_command_error(ctx, error): orig_error = getattr(error, 'original', error) error_msg =", "ready_greet() # ピンポン @bot.command() async def ping(ctx): await ctx.send('pong') @bot.event", "attachment.save(\"input.mp3\") command = 
\"ffmpeg -y -loop 1 -i input.jpg -i", "import Message from discord.ext import commands from discord.ext.commands import Context", "-i input.mp3 -vcodec libx264 -vb 50k -acodec aac -strict experimental", "from asyncio import sleep import asyncio client = discord.Client() #", "orig_error = getattr(error, 'original', error) error_msg = ''.join( traceback.TracebackException.from_exception(orig_error).format()) await", "await ctx.send('聖バリ「イキスギィイクイク!!!ンアッー!!!マクラがデカすぎる!!!」\\n\\n' f'{ctx.author.name}「聖なるバリア -ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る @bot.command() async def", "giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png') # bokuseku.mp3 流し逃げ - https://qiita.com/sizumita/items/cafd00fe3e114d834ce3 @bot.command() async", "ready_greet(): channel = bot.get_channel(GIRATINA_CHANNEL_ID) await channel.send('ギラティナ、オォン!') # Bot起動時に実行される関数 @bot.event async", "import getenv import discord from discord import Message from discord.ext", "-loop 1 -i input.jpg -i input.mp3 -vcodec libx264 -vb 50k", "@bot.command() async def ping(ctx): await ctx.send('pong') @bot.event async def on_message(message):", "import sleep import asyncio client = discord.Client() # botの接頭辞を!にする bot", "on_command_error(ctx, error): orig_error = getattr(error, 'original', error) error_msg = ''.join(", "メッセージに場合 if message.attachments and message.channel.id == WIP_CHANNEL_ID: for attachment in", "-ミラーフォース-、淫夢はもうやめてよ!淫夢ごっこは恥ずかしいよ!」\\n\\n聖バリ「{ctx.author.name}' '、おっ大丈夫か大丈夫か〜???バッチェ冷えてるぞ〜淫夢が大好きだってはっきりわかんだね」') # ギラティナの画像を送る @bot.command() async def giratina(ctx): await ctx.send('https://img.gamewith.jp/article/thumbnail/rectangle/36417.png')", "client = discord.Client() # botの接頭辞を!にする bot = commands.Bot(command_prefix='!') # ギラティナのチャンネルのID", "https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が ドナルド だった場合 if 'ドナルド' in str(message.content): #", "# 
https://discordpy.readthedocs.io/ja/latest/api.html#attachment if attachment.content_type and \"audio\" in attachment.content_type: await attachment.save(\"input.mp3\")", "await ctx.send('https://www.youtube.com/watch?v=dC0eie-WQss') # かおすちゃんを送信 @bot.command() async def kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large')", "def bokuseku(ctx): if ctx.author.voice is None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return #", "if ctx.author.voice is None: await ctx.channel.send('望月くん・・・ボイスチャンネルに来なさい') return # ボイスチャンネルに接続する await", "for attachment in message.attachments: # Attachmentの拡張子がmp3, wavのどれかだった場合 # https://discordpy.readthedocs.io/ja/latest/api.html#attachment if", "かおすちゃんを送信 @bot.command() async def kaosu(ctx): await ctx.send('https://pbs.twimg.com/media/E512yaSVIAQxfNn?format=jpg&name=large') # イキス @bot.command()", "- https://qiita.com/sizumita/items/9d44ae7d1ce007391699 # メッセージの本文が ドナルド だった場合 if 'ドナルド' in str(message.content):", "= discord.Client() # botの接頭辞を!にする bot = commands.Bot(command_prefix='!') # ギラティナのチャンネルのID GIRATINA_CHANNEL_ID", "ping(ctx): await ctx.send('pong') @bot.event async def on_message(message): # 送信者がBotである場合は弾く if", "async def ping(ctx): await ctx.send('pong') @bot.event async def on_message(message): #" ]
[ "# Server health check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record =", "'of run_tests.py.')) argp.add_argument( '--add_a_record', default=None, type=str, help=('Add an A record", "2.0 (the \"License\"); # you may not use this file", "and # limitations under the License. \"\"\"Starts a local DNS", "10 while num_timeouts_so_far < max_timeouts: sys.stdout.flush() time.sleep(sleep_time) num_timeouts_so_far += 1", "local DNS server for use in tests\"\"\" import argparse import", "in group['records'].keys(): for record in group['records'][name]: r_type = record['type'] r_data", "= 1 # Prevent zombies. Tests that use this server", "'different domain then the rest the records configured in '", "all need to be under the ' 'same domain). Format:", "_SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.' for twisted syntax", "for DNS server to listen on for TCP and UDP.')", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "port, target_full_name, ttl=r_ttl)) if r_type == 'TXT': _maybe_split_up_txt_data(record_full_name, r_data, r_ttl)", "0' % signum) twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0) def flush_stdout_loop(): num_timeouts_so_far =", "self.soa = soa self.records = records def start_local_dns_server(args): all_records =", "soa self.records = records def start_local_dns_server(args): all_records = {} def", "threading import time import twisted import twisted.internet import twisted.internet.reactor import", "authors. # # Licensed under the Apache License, Version 2.0", "language governing permissions and # limitations under the License. 
\"\"\"Starts", "import threading import time import twisted import twisted.internet import twisted.internet.reactor", "for name in group['records'].keys(): for record in group['records'][name]: r_type =", "target_full_name) _push_record( record_full_name, dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl)) if r_type", "ttl=r_ttl)) with open(args.records_config_path) as config: test_records_config = yaml.load(config) common_zone_name =", "target_full_name = '%s.%s' % (target, common_zone_name) r_data = '%s %s", "resolver_test_record_groups.yaml file. ' 'Defaults to path needed when the test", "records=all_records, ) server = twisted.names.server.DNSServerFactory( authorities=[test_domain_com], verbose=2) server.noisy = 2", "twisted.names.client import twisted.names.dns import twisted.names.server from twisted.names import client, server,", "for TCP and UDP.') argp.add_argument( '-r', '--records_config_path', default=None, type=str, help=('Directory", "import twisted.internet.defer import twisted.internet.protocol import twisted.names import twisted.names.client import twisted.names.dns", "use this file except in compliance with the License. #", "soa_record), records=all_records, ) server = twisted.names.server.DNSServerFactory( authorities=[test_domain_com], verbose=2) server.noisy =", "skip FileAuthority common.ResolverBase.__init__(self) self.soa = soa self.records = records def", "in ' '--records_config_path (which all need to be under the", "limitations under the License. 
\"\"\"Starts a local DNS server for", "% args.port) print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def _quit_on_signal(signum, _frame): print('Received", "sys.stdout.flush() time.sleep(sleep_time) num_timeouts_so_far += 1 print('Process timeout reached, or cancelled.", "' 'of run_tests.py.')) argp.add_argument( '--add_a_record', default=None, type=str, help=('Add an A", "common, authority, dns import argparse import platform _SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp'", "if r_type == 'SRV': p, w, port, target = r_data.split('", "short-lived. max_timeouts = 60 * 10 while num_timeouts_so_far < max_timeouts:", "test is invoked as part ' 'of run_tests.py.')) argp.add_argument( '--add_a_record',", "<name>:<ipv4 address>')) args = argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "import twisted.internet import twisted.internet.reactor import twisted.internet.threads import twisted.internet.defer import twisted.internet.protocol", "License. # You may obtain a copy of the License", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "1 # Prevent zombies. Tests that use this server are", "w, port, target_full_name) _push_record( record_full_name, dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl))", "if all_records.get(name) is not None: all_records[name].append(r) return all_records[name] = [r]", "assert record_full_name[-1] == '.' 
record_full_name = record_full_name[:-1] if r_type ==", "all_records[name].append(r) return all_records[name] = [r] def _maybe_split_up_txt_data(name, txt_data, r_ttl): start", "dns_proto = twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy = 2 twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting local", "\"\"\"Starts a local DNS server for use in tests\"\"\" import", "int(w) port = int(port) target_full_name = '%s.%s' % (target, common_zone_name)", "file. ' 'Defaults to path needed when the test is", "record_full_name, dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl)) if r_type == 'TXT':", "Server for resolver tests') argp.add_argument('-p', '--port', default=None, type=int, help='Port for", "import sys import yaml import signal import os import threading", "r_type == 'TXT': _maybe_split_up_txt_data(record_full_name, r_data, r_ttl) # Add an optional", "in compliance with the License. # You may obtain a", "record_full_name = record_full_name[:-1] if r_type == 'A': _push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl))", "num_timeouts_so_far < max_timeouts: sys.stdout.flush() time.sleep(sleep_time) num_timeouts_so_far += 1 print('Process timeout", "software # distributed under the License is distributed on an", "needed when the test is invoked as part ' 'of", "timeout reached, or cancelled. Exitting 0.') os.kill(os.getpid(), signal.SIGTERM) def main():", "len(txt_data[start:]) > 0: next_read = len(txt_data[start:]) if next_read > 255:", "via the command line. Useful for when we ' 'need", "authority, dns import argparse import platform _SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' #", "dns.Record_AAAA(r_data, ttl=r_ttl)) if r_type == 'SRV': p, w, port, target", "all_records[name] = [r] def _maybe_split_up_txt_data(name, txt_data, r_ttl): start = 0", "the command line. Useful for when we ' 'need to", "== '.' 
record_full_name = record_full_name[:-1] if r_type == 'A': _push_record(record_full_name,", "need to be under the ' 'same domain). Format: <name>:<ipv4", "r_ttl) # Add an optional IPv4 record is specified if", "of resolver_test_record_groups.yaml file. ' 'Defaults to path needed when the", "DNS Server for resolver tests') argp.add_argument('-p', '--port', default=None, type=int, help='Port", "next_read = len(txt_data[start:]) if next_read > 255: next_read = 255", "0.') os.kill(os.getpid(), signal.SIGTERM) def main(): argp = argparse.ArgumentParser( description='Local DNS", "a ' 'different domain then the rest the records configured", "== 'SRV': p, w, port, target = r_data.split(' ') p", "one-off A record that is under a ' 'different domain", "twisted.names.server from twisted.names import client, server, common, authority, dns import", "for record in group['records'][name]: r_type = record['type'] r_data = record['data']", "are short-lived. max_timeouts = 60 * 10 while num_timeouts_so_far <", "permissions and # limitations under the License. \"\"\"Starts a local", "== 'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl)) if r_type == 'SRV': p,", "line. Useful for when we ' 'need to serve a", "'SRV': p, w, port, target = r_data.split(' ') p =", "0 sleep_time = 1 # Prevent zombies. Tests that use", "output_flush_thread = threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True) output_flush_thread.start() start_local_dns_server(args) if __name__ == '__main__':", "argp = argparse.ArgumentParser( description='Local DNS Server for resolver tests') argp.add_argument('-p',", "an A record via the command line. 
Useful for when", "when we ' 'need to serve a one-off A record", "args = argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread = threading.Thread(target=flush_stdout_loop)", "twisted.internet.reactor.listenTCP(args.port, server) dns_proto = twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy = 2 twisted.internet.reactor.listenUDP(args.port, dns_proto)", "args.add_a_record: extra_host, extra_host_ipv4 = args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0)) # Server", "None: all_records[name].append(r) return all_records[name] = [r] def _maybe_split_up_txt_data(name, txt_data, r_ttl):", "twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy = 2 twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting local dns server", "default=None, type=int, help='Port for DNS server to listen on for", "dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with open(args.records_config_path) as config: test_records_config = yaml.load(config) common_zone_name", "= threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True) output_flush_thread.start() start_local_dns_server(args) if __name__ == '__main__': main()", "records): # skip FileAuthority common.ResolverBase.__init__(self) self.soa = soa self.records =", "sys.stdout.flush() sys.exit(0) def flush_stdout_loop(): num_timeouts_so_far = 0 sleep_time = 1", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "soa_record = dns.Record_SOA(mname=common_zone_name) test_domain_com = NoFileAuthority( soa=(common_zone_name, soa_record), records=all_records, )", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "server are short-lived. 
max_timeouts = 60 * 10 while num_timeouts_so_far", "group['records'].keys(): for record in group['records'][name]: r_type = record['type'] r_data =", "import argparse import sys import yaml import signal import os", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "start = 0 txt_data_list = [] while len(txt_data[start:]) > 0:", "r_type == 'SRV': p, w, port, target = r_data.split(' ')", "Useful for when we ' 'need to serve a one-off", "to in writing, software # distributed under the License is", "# See the License for the specific language governing permissions", "import twisted.names import twisted.names.client import twisted.names.dns import twisted.names.server from twisted.names", "import twisted.names.dns import twisted.names.server from twisted.names import client, server, common,", "command line. Useful for when we ' 'need to serve", "255: next_read = 255 txt_data_list.append(txt_data[start:start + next_read]) start += next_read", "= int(record['TTL']) record_full_name = '%s.%s' % (name, common_zone_name) assert record_full_name[-1]", "server.noisy = 2 twisted.internet.reactor.listenTCP(args.port, server) dns_proto = twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy =", "that use this server are short-lived. max_timeouts = 60 *", "invoked as part ' 'of run_tests.py.')) argp.add_argument( '--add_a_record', default=None, type=str,", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "txt_data_list = [] while len(txt_data[start:]) > 0: next_read = len(txt_data[start:])", "'--records_config_path', default=None, type=str, help=('Directory of resolver_test_record_groups.yaml file. 
' 'Defaults to", "twisted import twisted.internet import twisted.internet.reactor import twisted.internet.threads import twisted.internet.defer import", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. # You may obtain a copy of", "common_zone_name) assert record_full_name[-1] == '.' record_full_name = record_full_name[:-1] if r_type", "= NoFileAuthority( soa=(common_zone_name, soa_record), records=all_records, ) server = twisted.names.server.DNSServerFactory( authorities=[test_domain_com],", "is specified if args.add_a_record: extra_host, extra_host_ipv4 = args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4,", "twisted.internet import twisted.internet.reactor import twisted.internet.threads import twisted.internet.defer import twisted.internet.protocol import", "= '%s.%s' % (name, common_zone_name) assert record_full_name[-1] == '.' record_full_name", "local dns server on 127.0.0.1:%s' % args.port) print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1)", "import time import twisted import twisted.internet import twisted.internet.reactor import twisted.internet.threads", "(name, common_zone_name) assert record_full_name[-1] == '.' record_full_name = record_full_name[:-1] if", "%s %s' % (p, w, port, target_full_name) _push_record( record_full_name, dns.Record_SRV(p,", "dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl)) if r_type == 'TXT': _maybe_split_up_txt_data(record_full_name,", "Quitting with exit code 0' % signum) twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0)", "sys.exit(0) def flush_stdout_loop(): num_timeouts_so_far = 0 sleep_time = 1 #", "w, port, target_full_name, ttl=r_ttl)) if r_type == 'TXT': _maybe_split_up_txt_data(record_full_name, r_data,", "A record via the command line. Useful for when we", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0)) # Server health check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME,", "next_read]) start += next_read _push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with open(args.records_config_path) as", "twisted.internet.reactor import twisted.internet.threads import twisted.internet.defer import twisted.internet.protocol import twisted.names import", "% (target, common_zone_name) r_data = '%s %s %s %s' %", "distributed under the License is distributed on an \"AS IS\"", "argparse.ArgumentParser( description='Local DNS Server for resolver tests') argp.add_argument('-p', '--port', default=None,", "(p, w, port, target_full_name) _push_record( record_full_name, dns.Record_SRV(p, w, port, target_full_name,", "listen on for TCP and UDP.') argp.add_argument( '-r', '--records_config_path', default=None,", "as part ' 'of run_tests.py.')) argp.add_argument( '--add_a_record', default=None, type=str, help=('Add", "r_data, r_ttl) # Add an optional IPv4 record is specified", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "'%s %s %s %s' % (p, w, port, target_full_name) _push_record(", "< max_timeouts: sys.stdout.flush() time.sleep(sleep_time) num_timeouts_so_far += 1 print('Process timeout reached,", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "next_read = 255 txt_data_list.append(txt_data[start:start + next_read]) start += next_read _push_record(name,", "not use this file except in compliance with the License.", "# Add an optional IPv4 record is specified if args.add_a_record:", "len(txt_data[start:]) if next_read > 255: next_read = 255 txt_data_list.append(txt_data[start:start +", "platform _SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.' for twisted", "writing, software # distributed under the License is distributed on", "time.sleep(sleep_time) num_timeouts_so_far += 1 print('Process timeout reached, or cancelled. Exitting", "health check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record = dns.Record_SOA(mname=common_zone_name) test_domain_com", "you may not use this file except in compliance with", "'Defaults to path needed when the test is invoked as", "= soa self.records = records def start_local_dns_server(args): all_records = {}", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "server, common, authority, dns import argparse import platform _SERVER_HEALTH_CHECK_RECORD_NAME =", "all_records.get(name) is not None: all_records[name].append(r) return all_records[name] = [r] def", "is not None: all_records[name].append(r) return all_records[name] = [r] def _maybe_split_up_txt_data(name,", "NoFileAuthority( soa=(common_zone_name, soa_record), records=all_records, ) server = twisted.names.server.DNSServerFactory( authorities=[test_domain_com], verbose=2)", "Add an optional IPv4 record is specified if args.add_a_record: extra_host,", "') p = int(p) w = int(w) port = 
int(port)", "= record['data'] r_ttl = int(record['TTL']) record_full_name = '%s.%s' % (name,", "def _maybe_split_up_txt_data(name, txt_data, r_ttl): start = 0 txt_data_list = []", "to path needed when the test is invoked as part", "' 'different domain then the rest the records configured in", "group in test_records_config['resolver_component_tests']: for name in group['records'].keys(): for record in", "CONDITIONS OF ANY KIND, either express or implied. # See", "DNS server to listen on for TCP and UDP.') argp.add_argument(", "'-r', '--records_config_path', default=None, type=str, help=('Directory of resolver_test_record_groups.yaml file. ' 'Defaults", "to listen on for TCP and UDP.') argp.add_argument( '-r', '--records_config_path',", "signal.SIGTERM) def main(): argp = argparse.ArgumentParser( description='Local DNS Server for", "under the License. \"\"\"Starts a local DNS server for use", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "def _push_record(name, r): print('pushing record: |%s|' % name) if all_records.get(name)", "TCP and UDP.') argp.add_argument( '-r', '--records_config_path', default=None, type=str, help=('Directory of", "serve a one-off A record that is under a '", "if r_type == 'TXT': _maybe_split_up_txt_data(record_full_name, r_data, r_ttl) # Add an", "= int(port) target_full_name = '%s.%s' % (target, common_zone_name) r_data =", "in tests\"\"\" import argparse import sys import yaml import signal", "import os import threading import time import twisted import twisted.internet", "r_data.split(' ') p = int(p) w = int(w) port =", "sys import yaml import signal import os import threading import", "be under the ' 'same domain). 
Format: <name>:<ipv4 address>')) args", "[r] def _maybe_split_up_txt_data(name, txt_data, r_ttl): start = 0 txt_data_list =", "'--records_config_path (which all need to be under the ' 'same", "'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl)) if r_type == 'SRV': p, w,", "ttl=r_ttl)) if r_type == 'TXT': _maybe_split_up_txt_data(record_full_name, r_data, r_ttl) # Add", "% signum) twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0) def flush_stdout_loop(): num_timeouts_so_far = 0", "ttl=0)) soa_record = dns.Record_SOA(mname=common_zone_name) test_domain_com = NoFileAuthority( soa=(common_zone_name, soa_record), records=all_records,", "{} def _push_record(name, r): print('pushing record: |%s|' % name) if", "twisted.names.dns import twisted.names.server from twisted.names import client, server, common, authority,", "address>')) args = argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread =", "signum) twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0) def flush_stdout_loop(): num_timeouts_so_far = 0 sleep_time", "OR CONDITIONS OF ANY KIND, either express or implied. #", "twisted.names import client, server, common, authority, dns import argparse import", "the License is distributed on an \"AS IS\" BASIS, #", "that is under a ' 'different domain then the rest", "all_records = {} def _push_record(name, r): print('pushing record: |%s|' %", "w = int(w) port = int(port) target_full_name = '%s.%s' %", "this server are short-lived. 
max_timeouts = 60 * 10 while", "'A': _push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl)) if r_type == 'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data,", "print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def _quit_on_signal(signum, _frame): print('Received SIGNAL %d.", "common_zone_name = test_records_config['resolver_tests_common_zone_name'] for group in test_records_config['resolver_component_tests']: for name in", "__init__(self, soa, records): # skip FileAuthority common.ResolverBase.__init__(self) self.soa = soa", "syntax _SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3' class NoFileAuthority(authority.FileAuthority): def __init__(self, soa, records):", "server to listen on for TCP and UDP.') argp.add_argument( '-r',", "_push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record = dns.Record_SOA(mname=common_zone_name) test_domain_com = NoFileAuthority( soa=(common_zone_name,", "+= next_read _push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with open(args.records_config_path) as config: test_records_config", "def flush_stdout_loop(): num_timeouts_so_far = 0 sleep_time = 1 # Prevent", "for resolver tests') argp.add_argument('-p', '--port', default=None, type=int, help='Port for DNS", "argparse import sys import yaml import signal import os import", "law or agreed to in writing, software # distributed under", "= record_full_name[:-1] if r_type == 'A': _push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl)) if", "end '.' for twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3' class NoFileAuthority(authority.FileAuthority):", "= argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread = threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True)", "cancelled. 
Exitting 0.') os.kill(os.getpid(), signal.SIGTERM) def main(): argp = argparse.ArgumentParser(", "= argparse.ArgumentParser( description='Local DNS Server for resolver tests') argp.add_argument('-p', '--port',", "# skip FileAuthority common.ResolverBase.__init__(self) self.soa = soa self.records = records", "path needed when the test is invoked as part '", "part ' 'of run_tests.py.')) argp.add_argument( '--add_a_record', default=None, type=str, help=('Add an", "type=int, help='Port for DNS server to listen on for TCP", "dns import argparse import platform _SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing", "= 60 * 10 while num_timeouts_so_far < max_timeouts: sys.stdout.flush() time.sleep(sleep_time)", "#!/usr/bin/env python2.7 # Copyright 2015 gRPC authors. # # Licensed", "class NoFileAuthority(authority.FileAuthority): def __init__(self, soa, records): # skip FileAuthority common.ResolverBase.__init__(self)", "on for TCP and UDP.') argp.add_argument( '-r', '--records_config_path', default=None, type=str,", "while num_timeouts_so_far < max_timeouts: sys.stdout.flush() time.sleep(sleep_time) num_timeouts_so_far += 1 print('Process", "twisted.internet.protocol import twisted.names import twisted.names.client import twisted.names.dns import twisted.names.server from", "self.records = records def start_local_dns_server(args): all_records = {} def _push_record(name,", "rest the records configured in ' '--records_config_path (which all need", "_push_record( record_full_name, dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl)) if r_type ==", "may obtain a copy of the License at # #", "import twisted import twisted.internet import twisted.internet.reactor import twisted.internet.threads import twisted.internet.defer", "' 'Defaults to path needed when the test is invoked", "port = int(port) target_full_name = '%s.%s' % (target, common_zone_name) r_data", "import twisted.internet.threads import 
twisted.internet.defer import twisted.internet.protocol import twisted.names import twisted.names.client", "= '172.16.58.3' class NoFileAuthority(authority.FileAuthority): def __init__(self, soa, records): # skip", "w, port, target = r_data.split(' ') p = int(p) w", "SIGNAL %d. Quitting with exit code 0' % signum) twisted.internet.reactor.stop()", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "NoFileAuthority(authority.FileAuthority): def __init__(self, soa, records): # skip FileAuthority common.ResolverBase.__init__(self) self.soa", "r_ttl): start = 0 txt_data_list = [] while len(txt_data[start:]) >", "_push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with open(args.records_config_path) as config: test_records_config = yaml.load(config)", "may not use this file except in compliance with the", "ttl=r_ttl)) if r_type == 'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl)) if r_type", "import platform _SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.' for", "for twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3' class NoFileAuthority(authority.FileAuthority): def __init__(self,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "python2.7 # Copyright 2015 gRPC authors. # # Licensed under", "'.' record_full_name = record_full_name[:-1] if r_type == 'A': _push_record(record_full_name, dns.Record_A(r_data,", "os import threading import time import twisted import twisted.internet import", "= int(w) port = int(port) target_full_name = '%s.%s' % (target,", "import client, server, common, authority, dns import argparse import platform", "this file except in compliance with the License. # You", "= 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.' 
for twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA", "dns.Record_SOA(mname=common_zone_name) test_domain_com = NoFileAuthority( soa=(common_zone_name, soa_record), records=all_records, ) server =", "= len(txt_data[start:]) if next_read > 255: next_read = 255 txt_data_list.append(txt_data[start:start", "the test is invoked as part ' 'of run_tests.py.')) argp.add_argument(", "import twisted.internet.protocol import twisted.names import twisted.names.client import twisted.names.dns import twisted.names.server", "_push_record(name, r): print('pushing record: |%s|' % name) if all_records.get(name) is", "twisted.names.server.DNSServerFactory( authorities=[test_domain_com], verbose=2) server.noisy = 2 twisted.internet.reactor.listenTCP(args.port, server) dns_proto =", "use in tests\"\"\" import argparse import sys import yaml import", "if next_read > 255: next_read = 255 txt_data_list.append(txt_data[start:start + next_read])", "= [] while len(txt_data[start:]) > 0: next_read = len(txt_data[start:]) if", "gRPC authors. # # Licensed under the Apache License, Version", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "_maybe_split_up_txt_data(name, txt_data, r_ttl): start = 0 txt_data_list = [] while", "# # Licensed under the Apache License, Version 2.0 (the", "we ' 'need to serve a one-off A record that", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "common.ResolverBase.__init__(self) self.soa = soa self.records = records def start_local_dns_server(args): all_records", "% name) if all_records.get(name) is not None: all_records[name].append(r) return all_records[name]", "group['records'][name]: r_type = record['type'] r_data = record['data'] r_ttl = int(record['TTL'])", "dns server on 127.0.0.1:%s' % args.port) print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run()", "= test_records_config['resolver_tests_common_zone_name'] for group in test_records_config['resolver_component_tests']: for name in group['records'].keys():", "# Copyright 2015 gRPC authors. # # Licensed under the", "# limitations under the License. \"\"\"Starts a local DNS server", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "extra_host_ipv4 = args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0)) # Server health check", "record_full_name[-1] == '.' record_full_name = record_full_name[:-1] if r_type == 'A':", "dns.Record_A(r_data, ttl=r_ttl)) if r_type == 'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl)) if", "governing permissions and # limitations under the License. \"\"\"Starts a", "dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record = dns.Record_SOA(mname=common_zone_name) test_domain_com = NoFileAuthority( soa=(common_zone_name, soa_record),", "code 0' % signum) twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0) def flush_stdout_loop(): num_timeouts_so_far", "sleep_time = 1 # Prevent zombies. 
Tests that use this", "soa=(common_zone_name, soa_record), records=all_records, ) server = twisted.names.server.DNSServerFactory( authorities=[test_domain_com], verbose=2) server.noisy", "txt_data_list.append(txt_data[start:start + next_read]) start += next_read _push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with", "Copyright 2015 gRPC authors. # # Licensed under the Apache", "+ next_read]) start += next_read _push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with open(args.records_config_path)", "def _quit_on_signal(signum, _frame): print('Received SIGNAL %d. Quitting with exit code", "domain). Format: <name>:<ipv4 address>')) args = argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT,", "r): print('pushing record: |%s|' % name) if all_records.get(name) is not", "_push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl)) if r_type == 'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl))", "> 255: next_read = 255 txt_data_list.append(txt_data[start:start + next_read]) start +=", "255 txt_data_list.append(txt_data[start:start + next_read]) start += next_read _push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl))", "ttl=r_ttl)) if r_type == 'SRV': p, w, port, target =", "License. \"\"\"Starts a local DNS server for use in tests\"\"\"", "import signal import os import threading import time import twisted", "not None: all_records[name].append(r) return all_records[name] = [r] def _maybe_split_up_txt_data(name, txt_data,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "num_timeouts_so_far = 0 sleep_time = 1 # Prevent zombies. Tests", "% (name, common_zone_name) assert record_full_name[-1] == '.' 
record_full_name = record_full_name[:-1]", "authorities=[test_domain_com], verbose=2) server.noisy = 2 twisted.internet.reactor.listenTCP(args.port, server) dns_proto = twisted.names.dns.DNSDatagramProtocol(server)", "signal import os import threading import time import twisted import", "record in group['records'][name]: r_type = record['type'] r_data = record['data'] r_ttl", "import twisted.internet.reactor import twisted.internet.threads import twisted.internet.defer import twisted.internet.protocol import twisted.names", "ttl=0)) # Server health check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record", "type=str, help=('Add an A record via the command line. Useful", "or implied. # See the License for the specific language", "record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record = dns.Record_SOA(mname=common_zone_name) test_domain_com = NoFileAuthority(", "%d. Quitting with exit code 0' % signum) twisted.internet.reactor.stop() sys.stdout.flush()", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "> 0: next_read = len(txt_data[start:]) if next_read > 255: next_read", "' 'need to serve a one-off A record that is", "a local DNS server for use in tests\"\"\" import argparse", "= twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy = 2 twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting local dns", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "# Prevent zombies. 
Tests that use this server are short-lived.", "twisted.internet.threads import twisted.internet.defer import twisted.internet.protocol import twisted.names import twisted.names.client import", "twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def _quit_on_signal(signum, _frame): print('Received SIGNAL %d. Quitting", "return all_records[name] = [r] def _maybe_split_up_txt_data(name, txt_data, r_ttl): start =", "help=('Directory of resolver_test_record_groups.yaml file. ' 'Defaults to path needed when", "r_type == 'A': _push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl)) if r_type == 'AAAA':", "zombies. Tests that use this server are short-lived. max_timeouts =", "to serve a one-off A record that is under a", "type=str, help=('Directory of resolver_test_record_groups.yaml file. ' 'Defaults to path needed", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "= '%s.%s' % (target, common_zone_name) r_data = '%s %s %s", "= dns.Record_SOA(mname=common_zone_name) test_domain_com = NoFileAuthority( soa=(common_zone_name, soa_record), records=all_records, ) server", "== 'A': _push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl)) if r_type == 'AAAA': _push_record(record_full_name,", "Tests that use this server are short-lived. 
max_timeouts = 60", "on 127.0.0.1:%s' % args.port) print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def _quit_on_signal(signum,", "signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread = threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True) output_flush_thread.start() start_local_dns_server(args) if __name__", "import argparse import platform _SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end", "[] while len(txt_data[start:]) > 0: next_read = len(txt_data[start:]) if next_read", "to be under the ' 'same domain). Format: <name>:<ipv4 address>'))", "flush_stdout_loop(): num_timeouts_so_far = 0 sleep_time = 1 # Prevent zombies.", "if args.add_a_record: extra_host, extra_host_ipv4 = args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0)) #", "= twisted.names.server.DNSServerFactory( authorities=[test_domain_com], verbose=2) server.noisy = 2 twisted.internet.reactor.listenTCP(args.port, server) dns_proto", "r_type = record['type'] r_data = record['data'] r_ttl = int(record['TTL']) record_full_name", "= '%s %s %s %s' % (p, w, port, target_full_name)", "for when we ' 'need to serve a one-off A", "twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0) def flush_stdout_loop(): num_timeouts_so_far = 0 sleep_time =", "'same domain). Format: <name>:<ipv4 address>')) args = argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal)", "missing end '.' for twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3' class", "the License. 
\"\"\"Starts a local DNS server for use in", "server on 127.0.0.1:%s' % args.port) print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def", "# # Unless required by applicable law or agreed to", "then the rest the records configured in ' '--records_config_path (which", "_SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3' class NoFileAuthority(authority.FileAuthority): def __init__(self, soa, records): #", "as config: test_records_config = yaml.load(config) common_zone_name = test_records_config['resolver_tests_common_zone_name'] for group", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "record['type'] r_data = record['data'] r_ttl = int(record['TTL']) record_full_name = '%s.%s'", "print('pushing record: |%s|' % name) if all_records.get(name) is not None:", "argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread = threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True) output_flush_thread.start()", "UDP.') argp.add_argument( '-r', '--records_config_path', default=None, type=str, help=('Directory of resolver_test_record_groups.yaml file.", "Version 2.0 (the \"License\"); # you may not use this", "argp.add_argument('-p', '--port', default=None, type=int, help='Port for DNS server to listen", "record['data'] r_ttl = int(record['TTL']) record_full_name = '%s.%s' % (name, common_zone_name)", "2 twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting local dns server on 127.0.0.1:%s' %", "test_domain_com = NoFileAuthority( soa=(common_zone_name, soa_record), records=all_records, ) server = twisted.names.server.DNSServerFactory(", "_quit_on_signal) output_flush_thread = threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True) output_flush_thread.start() start_local_dns_server(args) if __name__ ==", "start += next_read 
_push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with open(args.records_config_path) as config:", "r_ttl = int(record['TTL']) record_full_name = '%s.%s' % (name, common_zone_name) assert", "(target, common_zone_name) r_data = '%s %s %s %s' % (p,", "help='Port for DNS server to listen on for TCP and", "= 2 twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting local dns server on 127.0.0.1:%s'", "implied. # See the License for the specific language governing", "import twisted.names.server from twisted.names import client, server, common, authority, dns", "twisted.internet.reactor.run() def _quit_on_signal(signum, _frame): print('Received SIGNAL %d. Quitting with exit", "under the Apache License, Version 2.0 (the \"License\"); # you", "argparse import platform _SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.'", "'TXT': _maybe_split_up_txt_data(record_full_name, r_data, r_ttl) # Add an optional IPv4 record", "use this server are short-lived. 
max_timeouts = 60 * 10", "int(p) w = int(w) port = int(port) target_full_name = '%s.%s'", "while len(txt_data[start:]) > 0: next_read = len(txt_data[start:]) if next_read >", "%s' % (p, w, port, target_full_name) _push_record( record_full_name, dns.Record_SRV(p, w,", "by applicable law or agreed to in writing, software #", "and UDP.') argp.add_argument( '-r', '--records_config_path', default=None, type=str, help=('Directory of resolver_test_record_groups.yaml", "if r_type == 'A': _push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl)) if r_type ==", "%s %s %s' % (p, w, port, target_full_name) _push_record( record_full_name,", "== 'TXT': _maybe_split_up_txt_data(record_full_name, r_data, r_ttl) # Add an optional IPv4", "check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record = dns.Record_SOA(mname=common_zone_name) test_domain_com =", "for use in tests\"\"\" import argparse import sys import yaml", "max_timeouts: sys.stdout.flush() time.sleep(sleep_time) num_timeouts_so_far += 1 print('Process timeout reached, or", "target = r_data.split(' ') p = int(p) w = int(w)", "1 print('Process timeout reached, or cancelled. Exitting 0.') os.kill(os.getpid(), signal.SIGTERM)", "under the ' 'same domain). Format: <name>:<ipv4 address>')) args =", "2 twisted.internet.reactor.listenTCP(args.port, server) dns_proto = twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy = 2 twisted.internet.reactor.listenUDP(args.port,", "the rest the records configured in ' '--records_config_path (which all", "record_full_name[:-1] if r_type == 'A': _push_record(record_full_name, dns.Record_A(r_data, ttl=r_ttl)) if r_type", "for group in test_records_config['resolver_component_tests']: for name in group['records'].keys(): for record", "= record['type'] r_data = record['data'] r_ttl = int(record['TTL']) record_full_name =", "'.' 
for twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3' class NoFileAuthority(authority.FileAuthority): def", "in group['records'][name]: r_type = record['type'] r_data = record['data'] r_ttl =", "server) dns_proto = twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy = 2 twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting", "twisted.names import twisted.names.client import twisted.names.dns import twisted.names.server from twisted.names import", "import twisted.names.client import twisted.names.dns import twisted.names.server from twisted.names import client,", "optional IPv4 record is specified if args.add_a_record: extra_host, extra_host_ipv4 =", "num_timeouts_so_far += 1 print('Process timeout reached, or cancelled. Exitting 0.')", "= 2 twisted.internet.reactor.listenTCP(args.port, server) dns_proto = twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy = 2", "the ' 'same domain). Format: <name>:<ipv4 address>')) args = argp.parse_args()", "127.0.0.1:%s' % args.port) print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def _quit_on_signal(signum, _frame):", "twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def _quit_on_signal(signum, _frame): print('Received SIGNAL %d. Quitting with", "' 'same domain). 
Format: <name>:<ipv4 address>')) args = argp.parse_args() signal.signal(signal.SIGTERM,", "r_type == 'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl)) if r_type == 'SRV':", "int(port) target_full_name = '%s.%s' % (target, common_zone_name) r_data = '%s", "exit code 0' % signum) twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0) def flush_stdout_loop():", "next_read _push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl)) with open(args.records_config_path) as config: test_records_config =", "port, target_full_name) _push_record( record_full_name, dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl)) if", "config: test_records_config = yaml.load(config) common_zone_name = test_records_config['resolver_tests_common_zone_name'] for group in", "if r_type == 'AAAA': _push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl)) if r_type ==", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "from twisted.names import client, server, common, authority, dns import argparse", "% (p, w, port, target_full_name) _push_record( record_full_name, dns.Record_SRV(p, w, port,", "Unless required by applicable law or agreed to in writing,", "verbose=2) server.noisy = 2 twisted.internet.reactor.listenTCP(args.port, server) dns_proto = twisted.names.dns.DNSDatagramProtocol(server) dns_proto.noisy", "def __init__(self, soa, records): # skip FileAuthority common.ResolverBase.__init__(self) self.soa =", "default=None, type=str, help=('Add an A record via the command line.", "_quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread = threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True) output_flush_thread.start() start_local_dns_server(args) if", "domain then the rest the records configured in ' '--records_config_path", "the specific language governing permissions and # limitations under the", "60 * 10 while num_timeouts_so_far < max_timeouts: sys.stdout.flush() 
time.sleep(sleep_time) num_timeouts_so_far", "yaml import signal import os import threading import time import", "extra_host, extra_host_ipv4 = args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0)) # Server health", "under a ' 'different domain then the rest the records", "name) if all_records.get(name) is not None: all_records[name].append(r) return all_records[name] =", "tests\"\"\" import argparse import sys import yaml import signal import", "server = twisted.names.server.DNSServerFactory( authorities=[test_domain_com], verbose=2) server.noisy = 2 twisted.internet.reactor.listenTCP(args.port, server)", "applicable law or agreed to in writing, software # distributed", "port, target = r_data.split(' ') p = int(p) w =", "Exitting 0.') os.kill(os.getpid(), signal.SIGTERM) def main(): argp = argparse.ArgumentParser( description='Local", "twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3' class NoFileAuthority(authority.FileAuthority): def __init__(self, soa,", "print('starting local dns server on 127.0.0.1:%s' % args.port) print('starting twisted.internet.reactor')", "resolver tests') argp.add_argument('-p', '--port', default=None, type=int, help='Port for DNS server", "record that is under a ' 'different domain then the", "= 0 txt_data_list = [] while len(txt_data[start:]) > 0: next_read", "records configured in ' '--records_config_path (which all need to be", "twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting local dns server on 127.0.0.1:%s' % args.port)", "run_tests.py.')) argp.add_argument( '--add_a_record', default=None, type=str, help=('Add an A record via", "in writing, software # distributed under the License is distributed", "configured in ' '--records_config_path (which all need to be under", "txt_data, r_ttl): start = 0 txt_data_list = [] while len(txt_data[start:])", "an optional IPv4 record is specified if args.add_a_record: extra_host, extra_host_ipv4", "'%s.%s' % (name, 
common_zone_name) assert record_full_name[-1] == '.' record_full_name =", "* 10 while num_timeouts_so_far < max_timeouts: sys.stdout.flush() time.sleep(sleep_time) num_timeouts_so_far +=", "with open(args.records_config_path) as config: test_records_config = yaml.load(config) common_zone_name = test_records_config['resolver_tests_common_zone_name']", "'--add_a_record', default=None, type=str, help=('Add an A record via the command", "client, server, common, authority, dns import argparse import platform _SERVER_HEALTH_CHECK_RECORD_NAME", "test_records_config['resolver_tests_common_zone_name'] for group in test_records_config['resolver_component_tests']: for name in group['records'].keys(): for", "yaml.load(config) common_zone_name = test_records_config['resolver_tests_common_zone_name'] for group in test_records_config['resolver_component_tests']: for name", "'%s.%s' % (target, common_zone_name) r_data = '%s %s %s %s'", "the records configured in ' '--records_config_path (which all need to", "def main(): argp = argparse.ArgumentParser( description='Local DNS Server for resolver", "Prevent zombies. Tests that use this server are short-lived. 
max_timeouts", "'need to serve a one-off A record that is under", "= 255 txt_data_list.append(txt_data[start:start + next_read]) start += next_read _push_record(name, dns.Record_TXT(*txt_data_list,", "main(): argp = argparse.ArgumentParser( description='Local DNS Server for resolver tests')", "= {} def _push_record(name, r): print('pushing record: |%s|' % name)", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "_maybe_split_up_txt_data(record_full_name, r_data, r_ttl) # Add an optional IPv4 record is", "# You may obtain a copy of the License at", "r_data = '%s %s %s %s' % (p, w, port,", "target_full_name, ttl=r_ttl)) if r_type == 'TXT': _maybe_split_up_txt_data(record_full_name, r_data, r_ttl) #", "a one-off A record that is under a ' 'different", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "FileAuthority common.ResolverBase.__init__(self) self.soa = soa self.records = records def start_local_dns_server(args):", "record is specified if args.add_a_record: extra_host, extra_host_ipv4 = args.add_a_record.split(':') _push_record(extra_host,", "p = int(p) w = int(w) port = int(port) target_full_name", "= [r] def _maybe_split_up_txt_data(name, txt_data, r_ttl): start = 0 txt_data_list", "r_data = record['data'] r_ttl = int(record['TTL']) record_full_name = '%s.%s' %", "when the test is invoked as part ' 'of run_tests.py.'))", "= yaml.load(config) common_zone_name = test_records_config['resolver_tests_common_zone_name'] for group in test_records_config['resolver_component_tests']: for", "argp.add_argument( '-r', '--records_config_path', default=None, type=str, help=('Directory of resolver_test_record_groups.yaml file. '", "= 0 sleep_time = 1 # Prevent zombies. 
Tests that", "' '--records_config_path (which all need to be under the '", "server for use in tests\"\"\" import argparse import sys import", "the License for the specific language governing permissions and #", "max_timeouts = 60 * 10 while num_timeouts_so_far < max_timeouts: sys.stdout.flush()", "Apache License, Version 2.0 (the \"License\"); # you may not", "record via the command line. Useful for when we '", "either express or implied. # See the License for the", "in test_records_config['resolver_component_tests']: for name in group['records'].keys(): for record in group['records'][name]:", "argp.add_argument( '--add_a_record', default=None, type=str, help=('Add an A record via the", "DNS server for use in tests\"\"\" import argparse import sys", "= records def start_local_dns_server(args): all_records = {} def _push_record(name, r):", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "test_records_config = yaml.load(config) common_zone_name = test_records_config['resolver_tests_common_zone_name'] for group in test_records_config['resolver_component_tests']:", "print('Process timeout reached, or cancelled. Exitting 0.') os.kill(os.getpid(), signal.SIGTERM) def", "2015 gRPC authors. # # Licensed under the Apache License,", "IPv4 record is specified if args.add_a_record: extra_host, extra_host_ipv4 = args.add_a_record.split(':')", "is under a ' 'different domain then the rest the", "def start_local_dns_server(args): all_records = {} def _push_record(name, r): print('pushing record:", "_frame): print('Received SIGNAL %d. Quitting with exit code 0' %", "0 txt_data_list = [] while len(txt_data[start:]) > 0: next_read =", "_quit_on_signal(signum, _frame): print('Received SIGNAL %d. 
Quitting with exit code 0'", "test_records_config['resolver_component_tests']: for name in group['records'].keys(): for record in group['records'][name]: r_type", "os.kill(os.getpid(), signal.SIGTERM) def main(): argp = argparse.ArgumentParser( description='Local DNS Server", "twisted.internet.defer import twisted.internet.protocol import twisted.names import twisted.names.client import twisted.names.dns import", "# missing end '.' for twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3'", "'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.' for twisted syntax _SERVER_HEALTH_CHECK_RECORD_DATA =", "int(record['TTL']) record_full_name = '%s.%s' % (name, common_zone_name) assert record_full_name[-1] ==", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "reached, or cancelled. Exitting 0.') os.kill(os.getpid(), signal.SIGTERM) def main(): argp", "time import twisted import twisted.internet import twisted.internet.reactor import twisted.internet.threads import", "Server health check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)) soa_record = dns.Record_SOA(mname=common_zone_name)", "start_local_dns_server(args): all_records = {} def _push_record(name, r): print('pushing record: |%s|'", "common_zone_name) r_data = '%s %s %s %s' % (p, w,", "open(args.records_config_path) as config: test_records_config = yaml.load(config) common_zone_name = test_records_config['resolver_tests_common_zone_name'] for", "args.port) print('starting twisted.internet.reactor') twisted.internet.reactor.suggestThreadPoolSize(1) twisted.internet.reactor.run() def _quit_on_signal(signum, _frame): print('Received SIGNAL", "'--port', default=None, type=int, help='Port for DNS server to listen on", "0: next_read = len(txt_data[start:]) if next_read > 255: next_read =", "dns_proto) print('starting local dns server on 127.0.0.1:%s' % args.port) print('starting", 
"next_read > 255: next_read = 255 txt_data_list.append(txt_data[start:start + next_read]) start", "Format: <name>:<ipv4 address>')) args = argp.parse_args() signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal)", "dns_proto.noisy = 2 twisted.internet.reactor.listenUDP(args.port, dns_proto) print('starting local dns server on", "\"License\"); # you may not use this file except in", "help=('Add an A record via the command line. Useful for", "'172.16.58.3' class NoFileAuthority(authority.FileAuthority): def __init__(self, soa, records): # skip FileAuthority", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "_push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0)) # Server health check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA,", "tests') argp.add_argument('-p', '--port', default=None, type=int, help='Port for DNS server to", "= args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0)) # Server health check record", "= r_data.split(' ') p = int(p) w = int(w) port", "# distributed under the License is distributed on an \"AS", "name in group['records'].keys(): for record in group['records'][name]: r_type = record['type']", "with exit code 0' % signum) twisted.internet.reactor.stop() sys.stdout.flush() sys.exit(0) def", "# Unless required by applicable law or agreed to in", "soa, records): # skip FileAuthority common.ResolverBase.__init__(self) self.soa = soa self.records", "record: |%s|' % name) if all_records.get(name) is not None: all_records[name].append(r)", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "(which all need to be under the ' 'same domain).", "is invoked as part ' 'of run_tests.py.')) argp.add_argument( '--add_a_record', default=None,", "record_full_name = '%s.%s' % (name, common_zone_name) assert record_full_name[-1] == '.'", ") server = twisted.names.server.DNSServerFactory( 
authorities=[test_domain_com], verbose=2) server.noisy = 2 twisted.internet.reactor.listenTCP(args.port,", "You may obtain a copy of the License at #", "A record that is under a ' 'different domain then", "signal.signal(signal.SIGTERM, _quit_on_signal) signal.signal(signal.SIGINT, _quit_on_signal) output_flush_thread = threading.Thread(target=flush_stdout_loop) output_flush_thread.setDaemon(True) output_flush_thread.start() start_local_dns_server(args)", "specified if args.add_a_record: extra_host, extra_host_ipv4 = args.add_a_record.split(':') _push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0))", "records def start_local_dns_server(args): all_records = {} def _push_record(name, r): print('pushing", "p, w, port, target = r_data.split(' ') p = int(p)", "or cancelled. Exitting 0.') os.kill(os.getpid(), signal.SIGTERM) def main(): argp =", "import yaml import signal import os import threading import time", "description='Local DNS Server for resolver tests') argp.add_argument('-p', '--port', default=None, type=int,", "+= 1 print('Process timeout reached, or cancelled. Exitting 0.') os.kill(os.getpid(),", "_push_record(record_full_name, dns.Record_AAAA(r_data, ttl=r_ttl)) if r_type == 'SRV': p, w, port,", "default=None, type=str, help=('Directory of resolver_test_record_groups.yaml file. ' 'Defaults to path", "the Apache License, Version 2.0 (the \"License\"); # you may", "= int(p) w = int(w) port = int(port) target_full_name =", "print('Received SIGNAL %d. Quitting with exit code 0' % signum)", "|%s|' % name) if all_records.get(name) is not None: all_records[name].append(r) return", "dns.Record_A(extra_host_ipv4, ttl=0)) # Server health check record _push_record(_SERVER_HEALTH_CHECK_RECORD_NAME, dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0))" ]
[ "-*- \"\"\" Showcases *ICTCP* *colour encoding* computations. \"\"\" import numpy", "BT.2020\" colourspace to \"ICTCP\" colour ' 'encoding given \"RGB\" values:\\n'", "encoding* computations. \"\"\" import numpy as np import colour from", "0.04091952]) message_box(('Converting from \"ITU-R BT.2020\" colourspace to \"ICTCP\" colour '", "import colour from colour.utilities import message_box message_box('\"ICTCP\" Colour Encoding Computations')", "colour encoding to \"ITU-R BT.2020\" ' 'colourspace given \"ICTCP\" values:\\n'", "\"ITU-R BT.2020\" colourspace to \"ICTCP\" colour ' 'encoding given \"RGB\"", "0.09351596]) message_box(('Converting from \"ICTCP\" colour encoding to \"ITU-R BT.2020\" '", "Colour Encoding Computations') RGB = np.array([0.45620519, 0.03081071, 0.04091952]) message_box(('Converting from", "= np.array([0.07351364, 0.00475253, 0.09351596]) message_box(('Converting from \"ICTCP\" colour encoding to", "from \"ITU-R BT.2020\" colourspace to \"ICTCP\" colour ' 'encoding given", "RGB = np.array([0.45620519, 0.03081071, 0.04091952]) message_box(('Converting from \"ITU-R BT.2020\" colourspace", "print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP = np.array([0.07351364, 0.00475253, 0.09351596]) message_box(('Converting from \"ICTCP\"", "colour ' 'encoding given \"RGB\" values:\\n' '\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP", "Computations') RGB = np.array([0.45620519, 0.03081071, 0.04091952]) message_box(('Converting from \"ITU-R BT.2020\"", "colour.utilities import message_box message_box('\"ICTCP\" Colour Encoding Computations') RGB = np.array([0.45620519,", "0.00475253, 0.09351596]) message_box(('Converting from \"ICTCP\" colour encoding to \"ITU-R BT.2020\"", "to \"ITU-R BT.2020\" ' 'colourspace given \"ICTCP\" values:\\n' '\\n\\t{0}'.format(ICTCP))) print(colour.ICTCP_to_RGB(ICTCP))", "# -*- coding: utf-8 -*- \"\"\" Showcases *ICTCP* *colour encoding*", "values:\\n' '\\n\\t{0}'.format(RGB))) 
print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP = np.array([0.07351364, 0.00475253, 0.09351596]) message_box(('Converting", "computations. \"\"\" import numpy as np import colour from colour.utilities", "import message_box message_box('\"ICTCP\" Colour Encoding Computations') RGB = np.array([0.45620519, 0.03081071,", "message_box message_box('\"ICTCP\" Colour Encoding Computations') RGB = np.array([0.45620519, 0.03081071, 0.04091952])", "= np.array([0.45620519, 0.03081071, 0.04091952]) message_box(('Converting from \"ITU-R BT.2020\" colourspace to", "Showcases *ICTCP* *colour encoding* computations. \"\"\" import numpy as np", "'\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP = np.array([0.07351364, 0.00475253, 0.09351596]) message_box(('Converting from", "print('\\n') ICTCP = np.array([0.07351364, 0.00475253, 0.09351596]) message_box(('Converting from \"ICTCP\" colour", "colourspace to \"ICTCP\" colour ' 'encoding given \"RGB\" values:\\n' '\\n\\t{0}'.format(RGB)))", "\"ICTCP\" colour encoding to \"ITU-R BT.2020\" ' 'colourspace given \"ICTCP\"", "to \"ICTCP\" colour ' 'encoding given \"RGB\" values:\\n' '\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB))", "'encoding given \"RGB\" values:\\n' '\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP = np.array([0.07351364,", "utf-8 -*- \"\"\" Showcases *ICTCP* *colour encoding* computations. 
\"\"\" import", "from colour.utilities import message_box message_box('\"ICTCP\" Colour Encoding Computations') RGB =", "message_box(('Converting from \"ICTCP\" colour encoding to \"ITU-R BT.2020\" ' 'colourspace", "from \"ICTCP\" colour encoding to \"ITU-R BT.2020\" ' 'colourspace given", "\"\"\" import numpy as np import colour from colour.utilities import", "np import colour from colour.utilities import message_box message_box('\"ICTCP\" Colour Encoding", "as np import colour from colour.utilities import message_box message_box('\"ICTCP\" Colour", "*colour encoding* computations. \"\"\" import numpy as np import colour", "\"ICTCP\" colour ' 'encoding given \"RGB\" values:\\n' '\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB)) print('\\n')", "np.array([0.45620519, 0.03081071, 0.04091952]) message_box(('Converting from \"ITU-R BT.2020\" colourspace to \"ICTCP\"", "given \"RGB\" values:\\n' '\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP = np.array([0.07351364, 0.00475253,", "ICTCP = np.array([0.07351364, 0.00475253, 0.09351596]) message_box(('Converting from \"ICTCP\" colour encoding", "\"RGB\" values:\\n' '\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP = np.array([0.07351364, 0.00475253, 0.09351596])", "coding: utf-8 -*- \"\"\" Showcases *ICTCP* *colour encoding* computations. \"\"\"", "import numpy as np import colour from colour.utilities import message_box", "encoding to \"ITU-R BT.2020\" ' 'colourspace given \"ICTCP\" values:\\n' '\\n\\t{0}'.format(ICTCP)))", "0.03081071, 0.04091952]) message_box(('Converting from \"ITU-R BT.2020\" colourspace to \"ICTCP\" colour", "message_box(('Converting from \"ITU-R BT.2020\" colourspace to \"ICTCP\" colour ' 'encoding", "*ICTCP* *colour encoding* computations. \"\"\" import numpy as np import", "numpy as np import colour from colour.utilities import message_box message_box('\"ICTCP\"", "\"\"\" Showcases *ICTCP* *colour encoding* computations. 
\"\"\" import numpy as", "-*- coding: utf-8 -*- \"\"\" Showcases *ICTCP* *colour encoding* computations.", "colour from colour.utilities import message_box message_box('\"ICTCP\" Colour Encoding Computations') RGB", "message_box('\"ICTCP\" Colour Encoding Computations') RGB = np.array([0.45620519, 0.03081071, 0.04091952]) message_box(('Converting", "' 'encoding given \"RGB\" values:\\n' '\\n\\t{0}'.format(RGB))) print(colour.RGB_to_ICTCP(RGB)) print('\\n') ICTCP =", "np.array([0.07351364, 0.00475253, 0.09351596]) message_box(('Converting from \"ICTCP\" colour encoding to \"ITU-R", "Encoding Computations') RGB = np.array([0.45620519, 0.03081071, 0.04091952]) message_box(('Converting from \"ITU-R" ]
[ "= models.ForeignKey( 'Routine', related_name='routines', on_delete=models.CASCADE) def sets(self): return SetRoutine.objects.filter(routine=self) class", "'Routine', related_name='routines', on_delete=models.CASCADE) def sets(self): return SetRoutine.objects.filter(routine=self) class Routine(models.Model): name", "name = models.CharField(max_length=255) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class SerieRoutine(BaseSerie): father_set", "ExpiringToken from .exercise import BaseSerie class RoutineDay(models.Model): name = models.CharField(max_length=255,", "models.ForeignKey( 'Routine', related_name='routines', on_delete=models.CASCADE) def sets(self): return SetRoutine.objects.filter(routine=self) class Routine(models.Model):", "models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE, related_name='series') class SetRoutine(models.Model): exercise = models.ForeignKey( 'Exercise',", "# Maneger User class is the class that provides the", "methods out of the box from rest_framework import exceptions from", "BaseSerie class RoutineDay(models.Model): name = models.CharField(max_length=255, blank=True) routine = models.ForeignKey(", "sets(self): return SetRoutine.objects.filter(routine=self) class Routine(models.Model): name = models.CharField(max_length=255) user =", "the box from rest_framework import exceptions from rest_framework.authentication import TokenAuthentication", "from multiselectfield import MultiSelectField # Maneger User class is the", "django.core.validators import MaxValueValidator, MinValueValidator from django.db import models from django.utils.translation", "django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin) from django.core.validators import MaxValueValidator, MinValueValidator", "rest_framework.authentication import TokenAuthentication from user.custom_token import ExpiringToken from .exercise import", "as _ from multiselectfield import MultiSelectField # Maneger User 
class", "from django.core.validators import MaxValueValidator, MinValueValidator from django.db import models from", "or admin and all methods out of the box from", "user.custom_token import ExpiringToken from .exercise import BaseSerie class RoutineDay(models.Model): name", "from rest_framework import exceptions from rest_framework.authentication import TokenAuthentication from user.custom_token", "rest_framework import exceptions from rest_framework.authentication import TokenAuthentication from user.custom_token import", "models from django.utils.translation import gettext_lazy as _ from multiselectfield import", "# for the settings file from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager,", "# of user or admin and all methods out of", "related_name='series') class SetRoutine(models.Model): exercise = models.ForeignKey( 'Exercise', on_delete=models.CASCADE) routine =", "= models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE, related_name='series') class SetRoutine(models.Model): exercise = models.ForeignKey(", "# this is how we can retrive variables # for", "creation # of user or admin and all methods out", "how we can retrive variables # for the settings file", "User class is the class that provides the creation #", "class SerieRoutine(BaseSerie): father_set = models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE, related_name='series') class SetRoutine(models.Model):", "is the class that provides the creation # of user", "Routine(models.Model): name = models.CharField(max_length=255) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class SerieRoutine(BaseSerie):", "RoutineDay(models.Model): name = models.CharField(max_length=255, blank=True) routine = models.ForeignKey( 'Routine', related_name='routines',", "father_set = models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE, related_name='series') class SetRoutine(models.Model): exercise =", "is how we can retrive variables # for the settings", 
"class that provides the creation # of user or admin", "variables # for the settings file from django.contrib.auth.models import (AbstractBaseUser,", "import exceptions from rest_framework.authentication import TokenAuthentication from user.custom_token import ExpiringToken", "models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class SerieRoutine(BaseSerie): father_set = models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE, related_name='series')", "import BaseSerie class RoutineDay(models.Model): name = models.CharField(max_length=255, blank=True) routine =", "from user.custom_token import ExpiringToken from .exercise import BaseSerie class RoutineDay(models.Model):", "this is how we can retrive variables # for the", "that provides the creation # of user or admin and", "on_delete=models.CASCADE) class SerieRoutine(BaseSerie): father_set = models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE, related_name='series') class", "MaxValueValidator, MinValueValidator from django.db import models from django.utils.translation import gettext_lazy", "routine = models.ForeignKey( 'Routine', related_name='routines', on_delete=models.CASCADE) def sets(self): return SetRoutine.objects.filter(routine=self)", "of user or admin and all methods out of the", "uuid from django.conf import settings # this is how we", "PermissionsMixin) from django.core.validators import MaxValueValidator, MinValueValidator from django.db import models", "= models.CharField(max_length=255) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class SerieRoutine(BaseSerie): father_set =", "class Routine(models.Model): name = models.CharField(max_length=255) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class", "file from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin) from django.core.validators import", "django.db import models from django.utils.translation import gettext_lazy as _ 
from", "multiselectfield import MultiSelectField # Maneger User class is the class", "(AbstractBaseUser, BaseUserManager, PermissionsMixin) from django.core.validators import MaxValueValidator, MinValueValidator from django.db", "on_delete=models.CASCADE) def sets(self): return SetRoutine.objects.filter(routine=self) class Routine(models.Model): name = models.CharField(max_length=255)", "import uuid from django.conf import settings # this is how", "models.CharField(max_length=255, blank=True) routine = models.ForeignKey( 'Routine', related_name='routines', on_delete=models.CASCADE) def sets(self):", "name = models.CharField(max_length=255, blank=True) routine = models.ForeignKey( 'Routine', related_name='routines', on_delete=models.CASCADE)", "retrive variables # for the settings file from django.contrib.auth.models import", "out of the box from rest_framework import exceptions from rest_framework.authentication", "_ from multiselectfield import MultiSelectField # Maneger User class is", "os import uuid from django.conf import settings # this is", "import ExpiringToken from .exercise import BaseSerie class RoutineDay(models.Model): name =", "the creation # of user or admin and all methods", ".exercise import BaseSerie class RoutineDay(models.Model): name = models.CharField(max_length=255, blank=True) routine", "provides the creation # of user or admin and all", "class RoutineDay(models.Model): name = models.CharField(max_length=255, blank=True) routine = models.ForeignKey( 'Routine',", "box from rest_framework import exceptions from rest_framework.authentication import TokenAuthentication from", "from rest_framework.authentication import TokenAuthentication from user.custom_token import ExpiringToken from .exercise", "MultiSelectField # Maneger User class is the class that provides", "return SetRoutine.objects.filter(routine=self) class Routine(models.Model): name = models.CharField(max_length=255) user = models.ForeignKey(settings.AUTH_USER_MODEL,", "import os 
import uuid from django.conf import settings # this", "blank=True) routine = models.ForeignKey( 'Routine', related_name='routines', on_delete=models.CASCADE) def sets(self): return", "from django.utils.translation import gettext_lazy as _ from multiselectfield import MultiSelectField", "we can retrive variables # for the settings file from", "import MaxValueValidator, MinValueValidator from django.db import models from django.utils.translation import", "on_delete=models.CASCADE, related_name='series') class SetRoutine(models.Model): exercise = models.ForeignKey( 'Exercise', on_delete=models.CASCADE) routine", "import TokenAuthentication from user.custom_token import ExpiringToken from .exercise import BaseSerie", "models.CharField(max_length=255) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class SerieRoutine(BaseSerie): father_set = models.ForeignKey(", "import models from django.utils.translation import gettext_lazy as _ from multiselectfield", "class is the class that provides the creation # of", "def sets(self): return SetRoutine.objects.filter(routine=self) class Routine(models.Model): name = models.CharField(max_length=255) user", "the settings file from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin) from", "settings # this is how we can retrive variables #", "TokenAuthentication from user.custom_token import ExpiringToken from .exercise import BaseSerie class", "import settings # this is how we can retrive variables", "user or admin and all methods out of the box", "= models.CharField(max_length=255, blank=True) routine = models.ForeignKey( 'Routine', related_name='routines', on_delete=models.CASCADE) def", "and all methods out of the box from rest_framework import", "exceptions from rest_framework.authentication import TokenAuthentication from user.custom_token import ExpiringToken from", "from django.conf import settings # this is how we can", "'SetRoutine', on_delete=models.CASCADE, 
related_name='series') class SetRoutine(models.Model): exercise = models.ForeignKey( 'Exercise', on_delete=models.CASCADE)", "from django.db import models from django.utils.translation import gettext_lazy as _", "class SetRoutine(models.Model): exercise = models.ForeignKey( 'Exercise', on_delete=models.CASCADE) routine = models.ForeignKey(", "the class that provides the creation # of user or", "from .exercise import BaseSerie class RoutineDay(models.Model): name = models.CharField(max_length=255, blank=True)", "django.utils.translation import gettext_lazy as _ from multiselectfield import MultiSelectField #", "Maneger User class is the class that provides the creation", "for the settings file from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin)", "django.conf import settings # this is how we can retrive", "gettext_lazy as _ from multiselectfield import MultiSelectField # Maneger User", "exercise = models.ForeignKey( 'Exercise', on_delete=models.CASCADE) routine = models.ForeignKey( 'RoutineDay', on_delete=models.CASCADE)", "SetRoutine.objects.filter(routine=self) class Routine(models.Model): name = models.CharField(max_length=255) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)", "<reponame>MauricioAntonioMartinez/django-workout-tracker-api import os import uuid from django.conf import settings #", "admin and all methods out of the box from rest_framework", "settings file from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin) from django.core.validators", "user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class SerieRoutine(BaseSerie): father_set = models.ForeignKey( 'SetRoutine',", "related_name='routines', on_delete=models.CASCADE) def sets(self): return SetRoutine.objects.filter(routine=self) class Routine(models.Model): name =", "SetRoutine(models.Model): exercise = models.ForeignKey( 'Exercise', on_delete=models.CASCADE) routine 
= models.ForeignKey( 'RoutineDay',", "from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin) from django.core.validators import MaxValueValidator,", "import MultiSelectField # Maneger User class is the class that", "MinValueValidator from django.db import models from django.utils.translation import gettext_lazy as", "import (AbstractBaseUser, BaseUserManager, PermissionsMixin) from django.core.validators import MaxValueValidator, MinValueValidator from", "= models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) class SerieRoutine(BaseSerie): father_set = models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE,", "import gettext_lazy as _ from multiselectfield import MultiSelectField # Maneger", "BaseUserManager, PermissionsMixin) from django.core.validators import MaxValueValidator, MinValueValidator from django.db import", "all methods out of the box from rest_framework import exceptions", "of the box from rest_framework import exceptions from rest_framework.authentication import", "can retrive variables # for the settings file from django.contrib.auth.models", "SerieRoutine(BaseSerie): father_set = models.ForeignKey( 'SetRoutine', on_delete=models.CASCADE, related_name='series') class SetRoutine(models.Model): exercise" ]
[ "nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid)", "N 15 W) and (56 N, 12 W) using the", "coordinates): Creating them and performing some custom diagnostics with them.", "two points. 2. Plotting data along a transect. 3. Calculating", "# Load packages and define some file paths import coast", "Flow across the transect # With NEMO’s staggered grid, the", "(54 N 15 W) and (56 N, 12 W) using", "performing some custom diagnostics with them. --- In this tutorial", "this tutorial we take a look at subsetting the model", "bespoke diagnostics along it. We look at: 1. Creating a", "staggered grid, the first step is to define the transect", "as the transect end points. tran_t = coast.TransectT(nemo_t, (54, -15),", "points closest to these coordinates will be selected as the", "model data on the u- and v-grid grids nemo_u =", "transect end points. The model points closest to these coordinates", "the data # It is simple to plot a scalar", "(54, -15), (56, -12)) # Inspect the data tran_t.data #", "nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid) # Now create a transect", "-12)) tran_f.data # We also need the i- and j-components", "take a look at subsetting the model data along a", "data between two coordinates): Creating them and performing some custom", "transect, we can plot the ‘normal_velocities’ variable cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\")", "defined between two points. 2. Plotting data along a transect.", "transect: temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show() # %%", "along a transect. 3. 
Calculating flow normal to the transect", "\"./config/example_nemo_grid_t.json\" fn_config_f_grid = \"./config/example_nemo_grid_f.json\" fn_config_u_grid = \"./config/example_nemo_grid_u.json\" fn_config_v_grid = \"./config/example_nemo_grid_v.json\"", "to plot the time averaged velocity across the transect, we", "\"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration files", "coast.TransectT(nemo_t, (54, -15), (56, -12)) # Inspect the data tran_t.data", "time averaged velocity across the transect, we can plot the", "a transect subset of the example dataset # Load packages", "so (lazy) load the model data on the u- and", "transect between the points (54 N 15 W) and (56", "(56, -12)) tran_f.data # We also need the i- and", "strict data formatting requirements, which are outlined in tranect.py. Transect", "the i- and j-components of velocity so (lazy) load the", "are all defined at the points between f-points. tran_f.data_cross_tran_flow #", "‘normal_velocities’ variable cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"})", "fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration files describing", "Transect class in the COAsT package. This object has strict", "it. We look at: 1. 
Creating a TRANSECT object, defined", "along a transect (a custom straight line) and creating some", "the ‘normal_transports’ variable plt.figure() cross_transport_mean = tran_f.data_cross_tran_flow.normal_transports.mean(dim=\"t_dim\") cross_transport_mean.rolling(r_dim=2).mean().plot() plt.ylabel(\"Sv\") plt.show()", "(a custom straight line) and creating some bespoke diagnostics along", "for using the Transect class in the COAsT package. This", "transect on the f-grid so that the velocity components are", "the grid information, so create a nemo f-grid object nemo_f", "velocity components are between f-points. We do not need any", "Creating them and performing some custom diagnostics with them. ---", "import xarray as xr import matplotlib.pyplot as plt fn_nemo_dat_t =", "config=fn_config_u_grid) nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid) # Now we can", "transect subset of the example dataset # Load packages and", "--- In this tutorial we take a look at subsetting", "t-grid nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid) # Now create a", "points. The model points closest to these coordinates will be", "plt.show() # %% Flow across the transect # With NEMO’s", "the `coast.TransectT` object. This needs to be passed the corresponding", "straight line) and creating some bespoke diagnostics along it. We", "temperature along the transect: temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False)", "new dataset where the variables are all defined at the", "dimension along the transect. # %% Plot the data #", "the time averaged velocity across the transect, we can plot", "selected as the transect end points. tran_t = coast.TransectT(nemo_t, (54,", "transect is stored in a new dataset where the variables", "do not need any model data on the f-grid, just", "object, defined between two points. 2. 
Plotting data along a", "object nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) # and a transect on", "model data on the f-grid, just the grid information, so", "the ‘normal_velocities’ variable cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\":", "# The flow across the transect is stored in a", "has strict data formatting requirements, which are outlined in tranect.py.", "Plotting data along a transect. 3. Calculating flow normal to", "With NEMO’s staggered grid, the first step is to define", "%% Flow across the transect # With NEMO’s staggered grid,", "diagnostics with them. --- In this tutorial we take a", "across the transect, we can plot the ‘normal_transports’ variable plt.figure()", "# Inspect the data tran_t.data # where `r_dim` is the", "We also need the i- and j-components of velocity so", "Load packages and define some file paths import coast import", "is the dimension along the transect. # %% Plot the", "coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) # and a transect on the f-grid tran_f", "= \"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration files describing the data files fn_config_t_grid", "formatting requirements, which are outlined in tranect.py. Transect subsetting (a", "the transect, we can plot the ‘normal_transports’ variable plt.figure() cross_transport_mean", "closest to these coordinates will be selected as the transect", "`r_dim` is the dimension along the transect. # %% Plot", "that are on the NEMO t-grid nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom,", "W) using the `coast.TransectT` object. 
This needs to be passed", "tran_f.calc_flow_across_transect(nemo_u, nemo_v) # The flow across the transect is stored", "across the transect with the method tran_f.calc_flow_across_transect(nemo_u, nemo_v) # The", "and j-components of velocity so (lazy) load the model data", "# Configuration files describing the data files fn_config_t_grid = \"./config/example_nemo_grid_t.json\"", "method tran_f.calc_flow_across_transect(nemo_u, nemo_v) # The flow across the transect is", "This is a demonstration script for using the Transect class", "15 W) and (56 N, 12 W) using the `coast.TransectT`", "# We also need the i- and j-components of velocity", "with the method tran_f.calc_flow_across_transect(nemo_u, nemo_v) # The flow across the", "= \"./config/example_nemo_grid_f.json\" fn_config_u_grid = \"./config/example_nemo_grid_u.json\" fn_config_v_grid = \"./config/example_nemo_grid_v.json\" # %%", "as xr import matplotlib.pyplot as plt fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u", "and creating some bespoke diagnostics along it. We look at:", "between two points. 2. Plotting data along a transect. 
3.", "import coast import xarray as xr import matplotlib.pyplot as plt", "averaged velocity across the transect, we can plot the ‘normal_velocities’", "temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show() # %% Flow", "the f-grid tran_f = coast.TransectF(nemo_f, (54, -15), (56, -12)) tran_f.data", "# or the volume transport across the transect, we can", "fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\" #", "a transect on the f-grid tran_f = coast.TransectF(nemo_f, (54, -15),", "some file paths import coast import xarray as xr import", "the transect with the method tran_f.calc_flow_across_transect(nemo_u, nemo_v) # The flow", "config=fn_config_t_grid) # Now create a transect between the points (54", "12 W) using the `coast.TransectT` object. This needs to be", "object has strict data formatting requirements, which are outlined in", "tran_t.data # where `r_dim` is the dimension along the transect.", "2. Plotting data along a transect. 3. Calculating flow normal", "where `r_dim` is the dimension along the transect. # %%", "plt.show() # or the volume transport across the transect, we", "slice of data between two coordinates): Creating them and performing", "requirements, which are outlined in tranect.py. Transect subsetting (a vertical", "a demonstration script for using the Transect class in the", "nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid) # Now we can calculate", "and a transect on the f-grid tran_f = coast.TransectF(nemo_f, (54,", "plot a scalar such as temperature along the transect: temp_mean", "transect end points. 
tran_t = coast.TransectT(nemo_t, (54, -15), (56, -12))", "xarray as xr import matplotlib.pyplot as plt fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\"", "need the i- and j-components of velocity so (lazy) load", "(56, -12)) # Inspect the data tran_t.data # where `r_dim`", "tutorial we take a look at subsetting the model data", "data along a transect (a custom straight line) and creating", "grid information, so create a nemo f-grid object nemo_f =", "The model points closest to these coordinates will be selected", "matplotlib.pyplot as plt fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v", "coordinates will be selected as the transect end points. tran_t", "also need the i- and j-components of velocity so (lazy)", "line) and creating some bespoke diagnostics along it. We look", "Create a transect subset of the example dataset # Load", "-12)) # Inspect the data tran_t.data # where `r_dim` is", "them. --- In this tutorial we take a look at", "-15), (56, -12)) tran_f.data # We also need the i-", "the f-grid, just the grid information, so create a nemo", "two coordinates): Creating them and performing some custom diagnostics with", "transect # With NEMO’s staggered grid, the first step is", "= \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration files describing the", "of the example dataset # Load packages and define some", "along it. We look at: 1. Creating a TRANSECT object,", "step is to define the transect on the f-grid so", "the transect end points. tran_t = coast.TransectT(nemo_t, (54, -15), (56,", "so that the velocity components are between f-points. We do", "of velocity so (lazy) load the model data on the", "simple to plot a scalar such as temperature along the", "the Transect class in the COAsT package. This object has", "variables are all defined at the points between f-points. 
tran_f.data_cross_tran_flow", "nemo_v) # The flow across the transect is stored in", "of data between two coordinates): Creating them and performing some", "cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"}) plt.show() #", "using the Transect class in the COAsT package. This object", "example, to plot the time averaged velocity across the transect,", "cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"}) plt.show() # or the volume transport", "to define the transect on the f-grid so that the", "custom straight line) and creating some bespoke diagnostics along it.", "fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom", "u- and v-grid grids nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v", "such as temperature along the transect: temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure()", "coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid) # Now create a transect between the", "data along a transect. 3. Calculating flow normal to the", "in a new dataset where the variables are all defined", "the example dataset # Load packages and define some file", "NEMO t-grid nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid) # Now create", "object and transect end points. The model points closest to", "and v-grid grids nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v =", "fn_config_v_grid = \"./config/example_nemo_grid_v.json\" # %% Load data variables that are", "points between f-points. 
tran_f.data_cross_tran_flow # For example, to plot the", "and performing some custom diagnostics with them. --- In this", "coast.TransectF(nemo_f, (54, -15), (56, -12)) tran_f.data # We also need", "Plot the data # It is simple to plot a", "Calculating flow normal to the transect \"\"\" ## Create a", "create a transect between the points (54 N 15 W)", "components are between f-points. We do not need any model", "fn_config_f_grid = \"./config/example_nemo_grid_f.json\" fn_config_u_grid = \"./config/example_nemo_grid_u.json\" fn_config_v_grid = \"./config/example_nemo_grid_v.json\" #", "# For example, to plot the time averaged velocity across", "define some file paths import coast import xarray as xr", "(54, -15), (56, -12)) tran_f.data # We also need the", "f-points. We do not need any model data on the", "fn_domain=fn_nemo_dom, config=fn_config_t_grid) # Now create a transect between the points", "Creating a TRANSECT object, defined between two points. 2. Plotting", "to the transect \"\"\" ## Create a transect subset of", "# %% Plot the data # It is simple to", "# where `r_dim` is the dimension along the transect. #", "end points. tran_t = coast.TransectT(nemo_t, (54, -15), (56, -12)) #", "a transect. 3. Calculating flow normal to the transect \"\"\"", "subset of the example dataset # Load packages and define", "f-points. tran_f.data_cross_tran_flow # For example, to plot the time averaged", "transport across the transect, we can plot the ‘normal_transports’ variable", "be passed the corresponding NEMO object and transect end points.", "\"./config/example_nemo_grid_u.json\" fn_config_v_grid = \"./config/example_nemo_grid_v.json\" # %% Load data variables that", "across the transect # With NEMO’s staggered grid, the first", "coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid) # Now", "the dimension along the transect. 
# %% Plot the data", "subsetting (a vertical slice of data between two coordinates): Creating", "\"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\"", "some bespoke diagnostics along it. We look at: 1. Creating", "\"\"\" This is a demonstration script for using the Transect", "3. Calculating flow normal to the transect \"\"\" ## Create", "the data files fn_config_t_grid = \"./config/example_nemo_grid_t.json\" fn_config_f_grid = \"./config/example_nemo_grid_f.json\" fn_config_u_grid", "the NEMO t-grid nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid) # Now", "files describing the data files fn_config_t_grid = \"./config/example_nemo_grid_t.json\" fn_config_f_grid =", "between f-points. tran_f.data_cross_tran_flow # For example, to plot the time", "xr import matplotlib.pyplot as plt fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u =", "# %% Load data variables that are on the NEMO", "tran_f.data_cross_tran_flow # For example, to plot the time averaged velocity", "transect. # %% Plot the data # It is simple", "the transect. 
# %% Plot the data # It is", "a new dataset where the variables are all defined at", "plot the ‘normal_velocities’ variable cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\",", "Now create a transect between the points (54 N 15", "model points closest to these coordinates will be selected as", "not need any model data on the f-grid, just the", "y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"}) plt.show() # or the volume transport across", "on the u- and v-grid grids nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom,", "fn_config_u_grid = \"./config/example_nemo_grid_u.json\" fn_config_v_grid = \"./config/example_nemo_grid_v.json\" # %% Load data", "plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"}) plt.show() # or the volume", "tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show() # %% Flow across the", "# With NEMO’s staggered grid, the first step is to", "%% Plot the data # It is simple to plot", "points. tran_t = coast.TransectT(nemo_t, (54, -15), (56, -12)) # Inspect", "so create a nemo f-grid object nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid)", "import matplotlib.pyplot as plt fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\"", "and transect end points. The model points closest to these", "the corresponding NEMO object and transect end points. The model", "data tran_t.data # where `r_dim` is the dimension along the", "that the velocity components are between f-points. We do not", "a TRANSECT object, defined between two points. 2. 
Plotting data", "velocity so (lazy) load the model data on the u-", "across the transect, we can plot the ‘normal_velocities’ variable cross_velocity_mean", "passed the corresponding NEMO object and transect end points. The", "= coast.TransectT(nemo_t, (54, -15), (56, -12)) # Inspect the data", "corresponding NEMO object and transect end points. The model points", "f-grid, just the grid information, so create a nemo f-grid", "the points between f-points. tran_f.data_cross_tran_flow # For example, to plot", "= \"./config/example_nemo_grid_t.json\" fn_config_f_grid = \"./config/example_nemo_grid_f.json\" fn_config_u_grid = \"./config/example_nemo_grid_u.json\" fn_config_v_grid =", "data variables that are on the NEMO t-grid nemo_t =", "at subsetting the model data along a transect (a custom", "= coast.TransectF(nemo_f, (54, -15), (56, -12)) tran_f.data # We also", "= tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"}) plt.show() # or", "to plot a scalar such as temperature along the transect:", "temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show() # %% Flow across the transect #", "end points. The model points closest to these coordinates will", "transect, we can plot the ‘normal_transports’ variable plt.figure() cross_transport_mean =", "stored in a new dataset where the variables are all", "yincrease=False) plt.show() # %% Flow across the transect # With", "diagnostics along it. We look at: 1. Creating a TRANSECT", "the COAsT package. This object has strict data formatting requirements,", "with them. 
--- In this tutorial we take a look", "are on the NEMO t-grid nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid)", "between two coordinates): Creating them and performing some custom diagnostics", "Now we can calculate the flow across the transect with", "along the transect. # %% Plot the data # It", "is stored in a new dataset where the variables are", "dataset where the variables are all defined at the points", "# It is simple to plot a scalar such as", "(a vertical slice of data between two coordinates): Creating them", "package. This object has strict data formatting requirements, which are", "tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"}) plt.show() # or the", "a look at subsetting the model data along a transect", "can plot the ‘normal_transports’ variable plt.figure() cross_transport_mean = tran_f.data_cross_tran_flow.normal_transports.mean(dim=\"t_dim\") cross_transport_mean.rolling(r_dim=2).mean().plot()", "the method tran_f.calc_flow_across_transect(nemo_u, nemo_v) # The flow across the transect", "grid, the first step is to define the transect on", "as plt fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v =", "any model data on the f-grid, just the grid information,", "packages and define some file paths import coast import xarray", "nemo f-grid object nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) # and a", "is to define the transect on the f-grid so that", "config=fn_config_v_grid) # Now we can calculate the flow across the", "Inspect the data tran_t.data # where `r_dim` is the dimension", "We do not need any model data on the f-grid,", "TRANSECT object, defined between two points. 2. 
Plotting data along", "\"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration files describing the data files fn_config_t_grid =", "creating some bespoke diagnostics along it. We look at: 1.", "a transect between the points (54 N 15 W) and", "we take a look at subsetting the model data along", "is simple to plot a scalar such as temperature along", "COAsT package. This object has strict data formatting requirements, which", "Configuration files describing the data files fn_config_t_grid = \"./config/example_nemo_grid_t.json\" fn_config_f_grid", "need any model data on the f-grid, just the grid", "For example, to plot the time averaged velocity across the", "in the COAsT package. This object has strict data formatting", "is a demonstration script for using the Transect class in", "\"m/s\"}) plt.show() # or the volume transport across the transect,", "transect (a custom straight line) and creating some bespoke diagnostics", "grids nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom,", "\"./config/example_nemo_grid_v.json\" # %% Load data variables that are on the", "= tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show() # %% Flow across", "first step is to define the transect on the f-grid", "subsetting the model data along a transect (a custom straight", "nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) # and a transect on the", "across the transect is stored in a new dataset where", "in tranect.py. Transect subsetting (a vertical slice of data between", "# Now we can calculate the flow across the transect", "# Now create a transect between the points (54 N", "points. 2. Plotting data along a transect. 3. 
Calculating flow", "plot the time averaged velocity across the transect, we can", "volume transport across the transect, we can plot the ‘normal_transports’", "to these coordinates will be selected as the transect end", "plot the ‘normal_transports’ variable plt.figure() cross_transport_mean = tran_f.data_cross_tran_flow.normal_transports.mean(dim=\"t_dim\") cross_transport_mean.rolling(r_dim=2).mean().plot() plt.ylabel(\"Sv\")", "The flow across the transect is stored in a new", "some custom diagnostics with them. --- In this tutorial we", "-15), (56, -12)) # Inspect the data tran_t.data # where", "we can plot the ‘normal_transports’ variable plt.figure() cross_transport_mean = tran_f.data_cross_tran_flow.normal_transports.mean(dim=\"t_dim\")", "at: 1. Creating a TRANSECT object, defined between two points.", "where the variables are all defined at the points between", "will be selected as the transect end points. tran_t =", "are outlined in tranect.py. Transect subsetting (a vertical slice of", "paths import coast import xarray as xr import matplotlib.pyplot as", "script for using the Transect class in the COAsT package.", "= \"./config/example_nemo_grid_u.json\" fn_config_v_grid = \"./config/example_nemo_grid_v.json\" # %% Load data variables", "N, 12 W) using the `coast.TransectT` object. This needs to", "NEMO object and transect end points. 
The model points closest", "the transect on the f-grid so that the velocity components", "the data tran_t.data # where `r_dim` is the dimension along", "to be passed the corresponding NEMO object and transect end", "can plot the ‘normal_velocities’ variable cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False,", "look at subsetting the model data along a transect (a", "\"./config/example_nemo_grid_f.json\" fn_config_u_grid = \"./config/example_nemo_grid_u.json\" fn_config_v_grid = \"./config/example_nemo_grid_v.json\" # %% Load", "a scalar such as temperature along the transect: temp_mean =", "data # It is simple to plot a scalar such", "these coordinates will be selected as the transect end points.", "= coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) # and a transect on the f-grid", "(56 N, 12 W) using the `coast.TransectT` object. This needs", "the model data on the u- and v-grid grids nemo_u", "between f-points. We do not need any model data on", "outlined in tranect.py. Transect subsetting (a vertical slice of data", "= coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid) # Now we can calculate the", "coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid) # Now we can calculate the flow", "j-components of velocity so (lazy) load the model data on", "Transect subsetting (a vertical slice of data between two coordinates):", "i- and j-components of velocity so (lazy) load the model", "In this tutorial we take a look at subsetting the", "the model data along a transect (a custom straight line)", "and (56 N, 12 W) using the `coast.TransectT` object. 
This", "NEMO’s staggered grid, the first step is to define the", "variable cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure() cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y=\"depth_0\", cbar_kwargs={\"label\": \"m/s\"}) plt.show()", "the first step is to define the transect on the", "normal to the transect \"\"\" ## Create a transect subset", "calculate the flow across the transect with the method tran_f.calc_flow_across_transect(nemo_u,", "on the NEMO t-grid nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid) #", "class in the COAsT package. This object has strict data", "file paths import coast import xarray as xr import matplotlib.pyplot", "fn_domain=fn_nemo_dom, config=fn_config_v_grid) # Now we can calculate the flow across", "the transect: temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show() #", "%% Load data variables that are on the NEMO t-grid", "the velocity components are between f-points. We do not need", "them and performing some custom diagnostics with them. 
--- In", "a nemo f-grid object nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) # and", "as temperature along the transect: temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\",", "the u- and v-grid grids nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid)", "points (54 N 15 W) and (56 N, 12 W)", "on the f-grid tran_f = coast.TransectF(nemo_f, (54, -15), (56, -12))", "# %% Flow across the transect # With NEMO’s staggered", "the transect is stored in a new dataset where the", "vertical slice of data between two coordinates): Creating them and", "define the transect on the f-grid so that the velocity", "transect \"\"\" ## Create a transect subset of the example", "using the `coast.TransectT` object. This needs to be passed the", "the transect, we can plot the ‘normal_velocities’ variable cross_velocity_mean =", "f-grid so that the velocity components are between f-points. We", "and define some file paths import coast import xarray as", "\"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration files describing the data", "the flow across the transect with the method tran_f.calc_flow_across_transect(nemo_u, nemo_v)", "plt fn_nemo_dat_t = \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\"", "scalar such as temperature along the transect: temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\")", "custom diagnostics with them. 
--- In this tutorial we take", "we can plot the ‘normal_velocities’ variable cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim=\"t_dim\") plt.figure()", "files fn_config_t_grid = \"./config/example_nemo_grid_t.json\" fn_config_f_grid = \"./config/example_nemo_grid_f.json\" fn_config_u_grid = \"./config/example_nemo_grid_u.json\"", "are between f-points. We do not need any model data", "data on the u- and v-grid grids nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u,", "which are outlined in tranect.py. Transect subsetting (a vertical slice", "= coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid) # Now create a transect between", "`coast.TransectT` object. This needs to be passed the corresponding NEMO", "velocity across the transect, we can plot the ‘normal_velocities’ variable", "coast import xarray as xr import matplotlib.pyplot as plt fn_nemo_dat_t", "1. Creating a TRANSECT object, defined between two points. 2.", "information, so create a nemo f-grid object nemo_f = coast.Gridded(fn_domain=fn_nemo_dom,", "data files fn_config_t_grid = \"./config/example_nemo_grid_t.json\" fn_config_f_grid = \"./config/example_nemo_grid_f.json\" fn_config_u_grid =", "W) and (56 N, 12 W) using the `coast.TransectT` object.", "# and a transect on the f-grid tran_f = coast.TransectF(nemo_f,", "cbar_kwargs={\"label\": \"m/s\"}) plt.show() # or the volume transport across the", "look at: 1. 
Creating a TRANSECT object, defined between two", "on the f-grid so that the velocity components are between", "a transect (a custom straight line) and creating some bespoke", "the f-grid so that the velocity components are between f-points.", "fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration files describing the data files", "transect with the method tran_f.calc_flow_across_transect(nemo_u, nemo_v) # The flow across", "on the f-grid, just the grid information, so create a", "just the grid information, so create a nemo f-grid object", "model data along a transect (a custom straight line) and", "= \"./config/example_nemo_grid_v.json\" # %% Load data variables that are on", "= \"./example_files/nemo_data_T_grid.nc\" fn_nemo_dat_u = \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom =", "tran_f = coast.TransectF(nemo_f, (54, -15), (56, -12)) tran_f.data # We", "create a nemo f-grid object nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) #", "tran_t = coast.TransectT(nemo_t, (54, -15), (56, -12)) # Inspect the", "the variables are all defined at the points between f-points.", "defined at the points between f-points. tran_f.data_cross_tran_flow # For example,", "plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show() # %% Flow across the transect", "all defined at the points between f-points. tran_f.data_cross_tran_flow # For", "It is simple to plot a scalar such as temperature", "flow across the transect is stored in a new dataset", "the points (54 N 15 W) and (56 N, 12", "between the points (54 N 15 W) and (56 N,", "Load data variables that are on the NEMO t-grid nemo_t", "at the points between f-points. tran_f.data_cross_tran_flow # For example, to", "tranect.py. 
Transect subsetting (a vertical slice of data between two", "fn_config_t_grid = \"./config/example_nemo_grid_t.json\" fn_config_f_grid = \"./config/example_nemo_grid_f.json\" fn_config_u_grid = \"./config/example_nemo_grid_u.json\" fn_config_v_grid", "needs to be passed the corresponding NEMO object and transect", "f-grid object nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid) # and a transect", "the transect # With NEMO’s staggered grid, the first step", "\"\"\" ## Create a transect subset of the example dataset", "transect. 3. Calculating flow normal to the transect \"\"\" ##", "demonstration script for using the Transect class in the COAsT", "tran_f.data # We also need the i- and j-components of", "This needs to be passed the corresponding NEMO object and", "## Create a transect subset of the example dataset #", "the transect \"\"\" ## Create a transect subset of the", "can calculate the flow across the transect with the method", "describing the data files fn_config_t_grid = \"./config/example_nemo_grid_t.json\" fn_config_f_grid = \"./config/example_nemo_grid_f.json\"", "variables that are on the NEMO t-grid nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t,", "We look at: 1. Creating a TRANSECT object, defined between", "config=fn_config_f_grid) # and a transect on the f-grid tran_f =", "fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid) # Now we", "v-grid grids nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v,", "object. 
This needs to be passed the corresponding NEMO object", "the volume transport across the transect, we can plot the", "= coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid) nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid) #", "(lazy) load the model data on the u- and v-grid", "load the model data on the u- and v-grid grids", "or the volume transport across the transect, we can plot", "dataset # Load packages and define some file paths import", "This object has strict data formatting requirements, which are outlined", "flow normal to the transect \"\"\" ## Create a transect", "f-grid tran_f = coast.TransectF(nemo_f, (54, -15), (56, -12)) tran_f.data #", "flow across the transect with the method tran_f.calc_flow_across_transect(nemo_u, nemo_v) #", "= \"./example_files/nemo_data_U_grid.nc\" fn_nemo_dat_v = \"./example_files/nemo_data_V_grid.nc\" fn_nemo_dom = \"./example_files/COAsT_example_NEMO_domain.nc\" # Configuration", "data on the f-grid, just the grid information, so create", "along the transect: temp_mean = tran_t.data.temperature.mean(dim=\"t_dim\") plt.figure() temp_mean.plot.pcolormesh(y=\"depth_0\", yincrease=False) plt.show()", "data formatting requirements, which are outlined in tranect.py. Transect subsetting", "we can calculate the flow across the transect with the", "example dataset # Load packages and define some file paths", "be selected as the transect end points. tran_t = coast.TransectT(nemo_t,", "transect on the f-grid tran_f = coast.TransectF(nemo_f, (54, -15), (56," ]
[ "``b`` are integers. In particular, since Python considers ``True`` and", "(B | C | ...) & ...). If simplify is", "does so in terms of structural equality rather than mathematical,", "Includes 0, 1 argset.discard(x) argset.add(True if x else False) rel", "-2) U (2, oo] \"\"\" from ..sets import Union if", "x > -2).as_set() (-2, 2) \"\"\" from ..sets import Intersection", "or ``(d, a)``: >>> eq1 = Or(And(Not(y), t), And(Not(y), z),", "return GreaterThan(*arg.args) if isinstance(arg, StrictGreaterThan): return LessThan(*arg.args) if isinstance(arg, LessThan):", "as a Symbol, # of times it appeared as a", "neg in combinations(self.args, i): clause = [~s if s in", "to_int_repr([x | y, y], [x, y]) [{1, 2}, {2}] \"\"\"", "false converter[bool] = lambda x: true if x else false", "| ~b | (a & ~b)) & (b | ~a", "b, c)) (a | ~c) & (b | ~a) &", "for x in terms: for y in essential: if _compare_term(x,", "returns the result of C. Examples ======== >>> ITE(True, False,", "& b) == frozenset([And(a, b)]) True \"\"\" return Or.make_args(expr) def", "used and when ``true`` should be used in various contexts", "False. Examples ======== >>> Nor(True, False) false >>> Nor(True, True)", "return info[0] rest = info[2](*[a for a in info[0].args if", "possible, to a simplified set of minterms with one less", "x else: return -1 return index def _convert_to_varsSOP(minterm, variables): \"\"\"", "The assumptions system should use ``True`` and ``False``. Aside from", "\"\"\" for i, x in enumerate(term): if x not in", "= [] for x in reversed(list(args)): if isinstance(x, Number) or", "form. That is, of the form ((A & ~B &", "symbols (Not(x)), or Or (or an And) whose arguments are", "Note that the output is NOT simplified. Examples ======== >>>", "whether a function or expression will return ``true`` or ``True``,", "if all arguments are True. Examples ======== >>> Nand(False, True)", "only to literals. 
If simplified is True, checks if result", "b) & (~a | ~b), {a: a, b: b, c:", "\"\"\" if len(self.free_symbols) == 1: return self.args[0].as_set().complement(S.Reals) else: raise NotImplementedError('Sorry,", "mean that a function has been denested and is either", "Or) def as_set(self): \"\"\" Rewrite logic operators and relationals in", "for a in info[0].args if a is not conj]) return", "result of B if A is true else it returns", "if cj == nc: return false elif cj == c:", "are not necessarily unique, but they are canonical. Here, ``(t,", "act bitwise on 0. Functions in the logic module will", "= simplify_logic(bool2) m = match(a, b) if m: return a,", "sympify from ..utilities import ordered class Boolean(Expr): \"\"\"A boolean object", "an equivalent sentence in DNF. Note that the output is", "^ b`` and ``Xor(a, b)`` will be different if ``a``", "return ``true`` or ``True``, just use ``==`` instead of ``is``", "True def eliminate_implications(expr): \"\"\" Change >>, <<, and Equivalent into", "true if True in argset: argset.discard(True) return And(*argset) if False", "args: if not is_literal(arg): arg = arg.to_nnf(simplify) if simplify: if", "= Or(And(Not(y), a), And(Not(y), b), And(x, y)) >>> dict(_finger(eq)) {(0,", "order, giving False immediately if any of them are True,", "if all arguments are False. Examples ======== >>> Nor(True, False)", "(a | ~c) & (b | ~a) & (c |", "(~x & ~y & ~z) | (~x & ~y &", "None (default). If 'cnf' or 'dnf', the simplest expression in", "__and__ def __or__(self, other): \"\"\"Overloading for | operator.\"\"\" return Or(self,", "sum of the number of arguments with which it appeared,", "if isinstance(arg, Number) or arg in (True, False): return false", "def _find_predicates(expr): \"\"\"Helper to find logical predicates in BooleanFunctions. 
A", "truthtable) else: raise ValueError('form can be cnf or dnf only')", "true >>> Not(And(True, False)) true >>> Not(Or(True, False)) false >>>", "| ~a) >>> eliminate_implications(Equivalent(a, b, c)) (a | ~c) &", "for & operator.\"\"\" return And(self, other) __rand__ = __and__ def", "# of times it appeared as a Not(symbol), # of", "x in [True, False]: # Includes 0, 1 argset.discard(x) argset.add(True", "& ~y) | (z & ~y), {t: a, x: b,", "operator.\"\"\" return Not(self) def __rshift__(self, other): \"\"\"Overloading for >> operator.\"\"\"", "x in terms: for y in essential: if _compare_term(x, y):", "from binary to it's variable form (for SOP). \"\"\" temp", "any(r == nc for r in rel): return [false] rel.append(c)", "| (a & ~b) | (b & c) | (~b", "in args]), simplify=simplify) if func == Xor: result = []", "not yet been' ' implemented for mutivariate' ' expressions') def", "true in argset else argset.add(true) for a, b in remove:", "but they are canonical. Here, ``(t, z)`` could be ``(a,", "-2).as_set() (-2, 2) \"\"\" from ..sets import Intersection if len(self.free_symbols)", "conj.args)))) elif isinstance(info[0], info[1]): return info[1](*list(map(_distribute, ((x, info[1], info[2]) for", "~x >>> ~x ~x >>> Not(And(Or(x, y), Or(~x, ~y))) ~((x", "Logical XOR (exclusive OR) function. Returns True if an odd", "is_Not = True @classmethod def eval(cls, arg): from ..core import", "i in expr.args)) def simplify_logic(expr, form=None, deep=True): \"\"\" This function", "isinstance(expr, BooleanFunction): return expr if simplify: return simplify_logic(expr, 'dnf', True)", "is True, the expr is evaluated to its simplest CNF", "other.has(Relational): raise NotImplementedError('handling of relationals') return self.atoms() == other.atoms() and", "= [] for i in range(0, len(self.args)+1, 2): for neg", "so this is a workaround that is valid for simplified", "for i, ti in enumerate(terms[:-1]): for j_i, tj in enumerate(terms[(i", "are False. 
Returns False if all arguments are True. Examples", "False \"\"\" from ..core.relational import Relational from .inference import satisfiable", "return And._to_nnf(Or(a, ~c), Or(~a, ~b), simplify=simplify) raise ValueError(f'Illegal operator {func}", "Checks if expr is in Negation Normal Form. A logical", "value of False. To avoid this issue, use the Diofant", "b), c)) (b & c) | (c & ~a) \"\"\"", "info[2](*[a for a in info[0].args if a is not conj])", "``True``, just use ``==`` instead of ``is`` to do the", "__rand__ = __and__ def __or__(self, other): \"\"\"Overloading for | operator.\"\"\"", "== nc for r in rel): return [false] rel.append(c) newargs.append(x)", "nc for r in rel): return [false] rel.append(c) newargs.append(x) return", "\"\"\"Test whether or not an expression is of the required", "of them is returned. For example, And(x, y) is logically", "True. To avoid this issue, use the Diofant boolean types", "& z) | (z & ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm", "if function1.__class__ != function2.__class__: return if len(function1.args) != len(function2.args): return", "if k not in f2 or len(f1[k]) != len(f2[k]): return", "b in remove: argset.remove(a) argset.remove(b) if len(argset) == 0: return", "===== The ``~`` operator is provided as a convenience, but", "the same as ``~1`` which is ``-2``, which has a", "essential def SOPform(variables, minterms, dontcares=None): \"\"\" The SOPform function uses", "remove: argset.remove(a) argset.remove(b) if len(argset) == 0: return false elif", "or ``False``, and does so in terms of structural equality", "list of all input combinations that generate '1' (the minterms)", "if is_nnf(expr, simplify): return expr return expr.to_nnf(simplify) def to_cnf(expr, simplify=False):", "form == 'cnf' or form is None: # pragma: no", "if newterm not in simplified_terms: simplified_terms.append(newterm) simplified_terms.extend( [terms[i] for i", "some correspondence between the variables 
of each. If more than", "function1): return False for cls in expr.args: if cls.is_Atom: continue", "bool2): \"\"\" Return the simplified version of bool1, and the", "according to the form with fewest args (in CNF by", "this issue, use the Diofant boolean types ``true`` and ``false``.", "of False. To avoid this issue, use the Diofant objects", "any argument is True. Returns True if all arguments are", "raise ValueError(f'Illegal operator {func} in expression') class Xor(BooleanFunction): \"\"\" Logical", ">>> to_nnf(Equivalent(a >> b, b >> a)) (a | ~b", "self.args[0].as_set().complement(S.Reals) else: raise NotImplementedError('Sorry, Not.as_set has not yet been' '", "ValueError: raise ValueError(f'{len(args)} operand(s) used for an Implies ' f'(pairs", "[false] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, And) def as_set(self): \"\"\" Rewrite", "Examples ======== >>> Or(x > 2, x < -2).as_set() [-oo,", "While \"``true is True``\" is ``False``, \"``true == True``\" is", "isinstance(expr, function1): return False for cls in expr.args: if cls.is_Atom:", "and # S.BooleanFalse, but making the class and instance names", "they are all True. Examples ======== >>> x & y", "[(r, r.canonical, (~r).canonical) for r in argset if r.is_Relational] odd", "equal they must have the same literals. Examples ======== >>>", "in args], simplify=simplify) if func == Implies: a, b =", "function2): \"\"\"Test whether or not an expression is of the", "ai in a.args) for ai in a.args: if ai.is_Symbol: d[ai][2]", ">>> to_dnf((a & b) | (a & ~b) | (b", "times it appeared as a Not(Symbol) in an And or", "of C. 
Examples ======== >>> ITE(True, False, True) false >>>", "args return And._to_nnf(Or(a, ~c), Or(~a, ~b), simplify=simplify) raise ValueError(f'Illegal operator", "Atom if expr.is_Atom: return True # Special case of a", "Symbol as 1 and Not(Symbol) as 2 ] >>> eq", "for neg in combinations(self.args, i): clause = [~s if s", "== -1: index = x else: return -1 return index", "def _eval_simplify(self, ratio, measure): return simplify_logic(self) def to_nnf(self, simplify=True): return", "expression is in disjunctive normal form. Examples ======== >>> is_dnf(a", "EmptySet() true = BooleanTrue() false: BooleanFalse = BooleanFalse() # We", "argset]) _args = frozenset(argset) obj = super().__new__(cls, _args) obj._argset =", "pass them as a list, too. The result will be", "Not, and Not is applied only to literals. If simplify", "| ~c) True >>> is_nnf((a | ~a) & (b |", "or 'dnf', the simplest expression in the corresponding normal form", "for cls in expr.args: if cls.is_Atom: continue if isinstance(cls, Not):", "accessed via ``false``. This is the Diofant version of False,", "And(*[~arg for arg in argset]) _args = frozenset(argset) obj =", "(0, 1): newargs.append(True if x else False) else: newargs.append(x) A,", "isinstance(arg, LessThan): return StrictGreaterThan(*arg.args) if isinstance(arg, GreaterThan): return StrictLessThan(*arg.args) def", "OR function It evaluates its arguments in order, giving True", "those contexts where the boolean is being used as a", "the smallest Product of Sums form. The variables must be", "true >>> Nor(x, y) ~(x | y) \"\"\" @classmethod def", "itself. 
\"\"\" if not isinstance(expr, BooleanFunction): return {expr} return set().union(*(_find_predicates(i)", ">>> Equivalent(False, False, False) true >>> Equivalent(True, False, False) false", "Or(*temp) def _simplified_pairs(terms): \"\"\" Reduces a set of minterms, if", "minterms = [[0, 0, 0, 1], [0, 0, 1, 1],", "& ~a) \"\"\" return _distribute((expr, Or, And)) def _distribute(info): \"\"\"Distributes", "conjunctions and disjunctions of literals, return an equivalent sentence in", "from not satisfying the above rule of thumb, the assumptions", "\"\"\" Equivalence relation. Equivalent(A, B) is True iff A and", "__xor__ def equals(self, other, failing_expression=False): \"\"\" Returns True if the", "(b | ~a) & (c | ~b) \"\"\" return to_nnf(expr)", "True)) true \"\"\" def __new__(cls, *args, **options): from ..core.relational import", "is applied only to literals. If simplified is True, checks", "indicates whether to recursively simplify any non-boolean functions contained within", "b) | c) True >>> is_dnf(a & (b | c))", "[]: return false minterms = [list(i) for i in minterms]", "if ``a`` and ``b`` are integers. >>> Xor(x, y).subs({y: 0})", "for multivariate' ' expressions') class Or(LatticeOp, BooleanFunction): \"\"\" Logical OR", "== 1: return argset.pop() elif True in argset: argset.remove(True) return", "most part, you can just use ``True`` and it will", "1, 1, 1]] >>> dontcares = [[0, 0, 0, 0],", ">>> sympify(b) (z & ~x & ~y) | (~x &", "a, b = args return And._to_nnf(a, ~b, simplify=simplify) if func", "and Not, and Not is applied only to literals. If", "necessary, similar to how you can generally use 1 instead", "if any argument is True. 
Returns True if all arguments", "Or.make_args(expr) def distribute_and_over_or(expr): \"\"\" Given a sentence s consisting of", "type: ignore[misc] @cacheit def args(self): return tuple(ordered(self._argset)) def to_nnf(self, simplify=True):", "rel[j][:2] if cj == nc: return false elif cj ==", "in args: if Not(arg) in args: return False stack.extend(expr.args) elif", "simplify=True): a, b = self.args return Or._to_nnf(~a, b, simplify=simplify) class", "\"\"\" expr = sympify(expr) if not isinstance(expr, BooleanFunction): return expr", "expression in the corresponding normal form is returned; if None,", "o inv = defaultdict(list) for k, v in ordered(d.items()): inv[tuple(v)].append(k)", "singleton that can be accessed via ``true``. This is the", "BooleanFunction) else: return not isinstance(expr, BooleanFunction) def to_int_repr(clauses, symbols): \"\"\"", "repeat=len(variables)): t = list(t) if (t not in minterms) and", "elif arg in argset: argset.remove(arg) else: argset.add(arg) rel = [(r,", "c) | (~b & c), True) a | c \"\"\"", "in info[0].args: if isinstance(arg, info[1]): conj = arg break else:", "bool2 represent the same logical behaviour for some correspondence between", "in the expansion of a function from binary to it's", "1] = None newterm = ti[:] newterm[index] = 3 if", "{x: a, y:b} or {x: b, y:a}. If no such", "elif isinstance(info[0], info[1]): return info[1](*list(map(_distribute, ((x, info[1], info[2]) for x", "for i in (dontcares or [])] for d in dontcares:", "_ in todo if _ is not None]]) return simplified_terms", "if d in minterms: raise ValueError(f'{d} in minterms is also", "POS). 
\"\"\" temp = [] for i, m in enumerate(maxterm):", "simplify_logic(expr, form=None, deep=True): \"\"\" This function simplifies a boolean function", "= stack.pop() if expr.func in (And, Or): if simplified: args", "\"\"\" @classmethod def eval(cls, *args): return Not(And(*args)) class Nor(BooleanFunction): \"\"\"", "variables): \"\"\" Converts a term in the expansion of a", "isinstance(arg, Unequality): return Equality(*arg.args) if isinstance(arg, StrictLessThan): return GreaterThan(*arg.args) if", "return Or._to_nnf(~a, b, simplify=simplify) class Equivalent(BooleanFunction): \"\"\" Equivalence relation. Equivalent(A,", "we have to if is_dnf(expr): return expr expr = eliminate_implications(expr)", "for i, (r, c, nc) in enumerate(rel): for j in", "causes some # major issues (like the inability to import", "True \"\"\" return And.make_args(expr) def disjuncts(expr): \"\"\"Return a list of", "smallest Sum of Products form. The variables must be given", "else: arg = true if isinstance(arg, Xor): for a in", "being used as a symbolic representation of truth. For example,", "& ~b) | (c & d))) (a | b) &", "for | operator.\"\"\" return Or(self, other) __ror__ = __or__ def", "equivalent. Returns False otherwise. Examples ======== >>> Equivalent(False, False, False)", "[{append_symbol(arg, symbols) for arg in Or.make_args(c)} for c in clauses]", "~B & ...) | (B & C & ...) |", "the corresponding normal form is returned; if None, the answer", "== Equivalent: return And._to_nnf(Or(*args), Or(*[~arg for arg in args]), simplify=simplify)", "a list of the conjuncts in the expr s. Examples", "returns ``True`` or ``False``, and does so in terms of", "_simplified_pairs(old) essential = _rem_redundancy(new, minterms) return Or(*[_convert_to_varsSOP(x, variables) for x", "newargs = [] for x in args: if isinstance(x, Number)", "should be used and when ``true`` should be used in", "use 1 instead of ``Integer(1)``. 
The rule of thumb is:", "class Not(BooleanFunction): \"\"\" Logical Not function (negation). Returns True if", "as a convenience, but note that its use here is", "Examples ======== >>> function1 = SOPform([x, z, y], [[1, 0,", "False immediately if any of them are False, and True", "Intersection if len(self.free_symbols) == 1: return Intersection(*[arg.as_set() for arg in", "arg in super(Xor, obj).args: if isinstance(arg, Number) or arg in", "def to_dnf(expr, simplify=False): \"\"\" Convert a propositional logical sentence s", "= _finger(function2) # more quick checks if len(f1) != len(f2):", "c == true: return Not(a) def to_nnf(self, simplify=True): a, b,", "different from their normal use in Python, which is bit", "z) >>> ITE(True, x, y) x >>> ITE(False, x, y)", "def _check_pair(minterm1, minterm2): \"\"\" Checks if a pair of minterms", "enough (see issue sympy/sympy#4835) so this is a workaround that", "if not cls.args[0].is_Atom: return False elif not isinstance(cls, function2): return", "f} for a in eq.args: if a.is_Symbol: d[a][0] += 1", "``True`` or ``False``, and does so in terms of structural", "QM method. \"\"\" simplified_terms = [] todo = list(range(len(terms))) for", "\"\"\" Rewrite logic operators and relationals in terms of real", "if func == Xor: result = [] for i in", "should be used in various contexts throughout Diofant. An important", "& ~y & ~z) | (~x & ~y & z)", "'dnf', True) # Don't convert unless we have to if", "len(self.args)+1, 2): for neg in combinations(self.args, i): clause = [~s", "bitwise not. 
In particular, ``~a`` and ``Not(a)`` will be different", "advantage of using true instead of True is that shorthand", "to_cnf(~(a | b) | c) (c | ~a) & (c", "BooleanFunction): return {expr} return set().union(*(_find_predicates(i) for i in expr.args)) def", "essential: if _compare_term(x, y): break else: for z in l1:", "return And.make_args(expr) def disjuncts(expr): \"\"\"Return a list of the disjuncts", "c = self.args return And._to_nnf(Or(~a, b), Or(a, c), simplify=simplify) def", "else: return info[0] def to_nnf(expr, simplify=True): \"\"\" Converts expr to", "======== >>> Not(x > 0, evaluate=False).as_set() (-oo, 0] \"\"\" if", "to_dnf((a & b) | (a & ~b) | (b &", "the statement is False. Returns False if the statement is", "Examples ======== >>> to_nnf(Not((~a & ~b) | (c & d)))", "\"\"\" return _distribute((expr, And, Or)) def distribute_or_over_and(expr): \"\"\" Given a", "sympify(expr) if not isinstance(expr, BooleanFunction): return expr variables = _find_predicates(expr)", "a convenience, but note that their use here is different", "it's better to just use ``if x`` instead of ``if", "if x else False) rel = [] for r in", "False return True def is_cnf(expr): \"\"\" Test whether or not", "[-oo, -2) U (2, oo] \"\"\" from ..sets import Union", "(~c).canonical if any(r == nc for r in rel): return", "= {} for k in f1: if k not in", "is not conj]) return info[1](*list(map(_distribute, ((info[2](c, rest), info[1], info[2]) for", "~(x | y) \"\"\" @classmethod def eval(cls, *args): return Not(Or(*args))", "return _distribute((expr, And, Or)) def distribute_or_over_and(expr): \"\"\" Given a sentence", "true >>> Not(Or(True, False)) false >>> Not(And(And(True, x), Or(x, False)))", "And._to_nnf(*args, simplify=simplify) class ITE(BooleanFunction): \"\"\" If then else clause. 
ITE(A,", ">>> Implies(True, False) false >>> Implies(False, False) true >>> Implies(True,", "issues (like the inability to import the class directly from", "def __new__(cls, *args, **options): from ..core.relational import Relational args =", "if x not in (3, minterm[i]): return False return True", "a symbolic representation of truth. For example, if the object", "StrictLessThan(*arg.args) def as_set(self): \"\"\" Rewrite logic operators and relationals in", "mapping of variables that makes the two expressions bool1 and", "from its normal use in Python, which is bitwise and.", "& c) True >>> is_cnf((a & b) | c) False", "U (2, oo] \"\"\" from ..sets import Union if len(self.free_symbols)", "Worse: ``if greeting is True:`` Examples ======== >>> sympify(True) true", "are logically equivalent. Returns False otherwise. Examples ======== >>> Equivalent(False,", "If 'cnf' or 'dnf', the simplest expression in the corresponding", "x is True``. To quote PEP 8: Don't compare boolean", "in Or.make_args(c)} for c in clauses] def _check_pair(minterm1, minterm2): \"\"\"", "The variables must be given as the first argument. Return", "returns ``true``. This means that for the most part, you", "not in dontcares): maxterms.append(t) old = None new = maxterms", "bools in Python subclass from ``int``, ``~True`` is the same", "the expr s. Examples ======== >>> conjuncts(a & b) ==", "is a literal, else False. Examples ======== >>> is_literal(a) True", "the result of B if A is true else it", "in Python, which is bit shifts. Hence, ``Implies(a, b)`` and", "= SOPform([a, b, c], [[1, 0, 1], [1, 0, 0]])", "a literal, else False. Examples ======== >>> is_literal(a) True >>>", "import Atom, cacheit from ..core.expr import Expr from ..core.function import", "======== >>> Implies(True, False) false >>> Implies(False, False) true >>>", "= super().__new__(cls, _args) obj._argset = _args return obj @property #", "Or, Not, etc. 
\"\"\" is_Boolean = True def _eval_simplify(self, ratio,", "False) false >>> x ^ y Xor(x, y) Notes =====", "if ai.is_Symbol: d[ai][2] += 1 d[ai][-1] += o else: d[ai.args[0]][3]", "for x in reversed(list(args)): if isinstance(x, Number) or x in", "is evaluated to its simplest CNF form. Examples ======== >>>", "predicate is defined here as anything within a BooleanFunction that", "i != j: if index == -1: index = x", "\"\"\" variables = [sympify(v) for v in variables] if minterms", "d}) >>> eq = And(Xor(a, b), c, And(c, d)) >>>", "counting Symbol as 1 and Not(Symbol) as 2 ] >>>", "is_literal(expr): return False return True def is_cnf(expr): \"\"\" Test whether", "result.append(Or(*clause)) return And._to_nnf(*result, simplify=simplify) if func == ITE: a, b,", "not in simplified_terms: simplified_terms.append(newterm) simplified_terms.extend( [terms[i] for i in [_", "minterms, dontcares=None): \"\"\" The SOPform function uses simplified_pairs and a", "result contains no redundant clauses. Examples ======== >>> is_nnf(a &", "not isinstance(expr, function1): return False for cls in expr.args: if", "SOPform([t, x, y, z], minterms, dontcares) (y & z) |", ">>> eliminate_implications(Implies(a, b)) b | ~a >>> eliminate_implications(Equivalent(a, b)) (a", "# Don't convert unless we have to if is_cnf(expr): return", "sympify(expr) if is_literal(expr): return True stack = [expr] while stack:", "expr = sympify(expr) if not isinstance(expr, BooleanFunction): return expr if", "c, nc) in enumerate(rel): for j in range(i + 1,", "== 1: if temporary[0] not in essential: essential.append(temporary[0]) for x", "the object ends up in the ``.args`` of any expression,", "statement is False. Returns False if the statement is True.", "Python, which is bitwise not. In particular, ``~a`` and ``Not(a)``", "if temporary[0] not in essential: essential.append(temporary[0]) for x in terms:", "all arguments are True. 
Examples ======== >>> Nand(False, True) true", "The ``|`` operator is provided as a convenience, but note", "non-boolean functions contained within the input. Examples ======== >>> b", "fingerprint dictionaries f1 = _finger(function1) f2 = _finger(function2) # more", "Examples ======== >>> is_cnf(a | b | c) True >>>", "are False. Examples ======== >>> Nor(True, False) false >>> Nor(True,", "are required): {args!s}') if A == true or A ==", "``False``, and does so in terms of structural equality rather", "\"\"\" is_Boolean = True def _eval_simplify(self, ratio, measure): return simplify_logic(self)", "[] for i, m in enumerate(maxterm): if m == 1:", "simplified version in SOP or POS form. The return type", "False) rel = [] for r in argset: if isinstance(r,", "expression, then it must necessarily be ``true`` instead of ``True``,", "contains only And, Or and Not, and Not is applied", "https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables = [sympify(v) for v in variables] if", "of bool1, and the mapping of variables that makes the", "~y) | (z & ~y), {t: a, x: b, y:", "false >> false true >>> False >> False 0 >>>", "terms: temporary = [] for y in l1: if _compare_term(x,", "true.as_set() UniversalSet() \"\"\" return S.UniversalSet class BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version", "!= old: old = new new = _simplified_pairs(old) essential =", "return And(*[_convert_to_varsPOS(x, variables) for x in essential]) def _find_predicates(expr): \"\"\"Helper", "3 arguments') if a == true: return b elif a", "b, c = self.args return And._to_nnf(Or(~a, b), Or(a, c), simplify=simplify)", "to literals. If simplify is True, the result contains no", "integer. 
Furthermore, since bools in Python subclass from ``int``, ``~True``", "The ``>>`` and ``<<`` operators are provided as a convenience,", "\"\"\" Convert a propositional logical sentence s to disjunctive normal", "False in argset: argset.discard(False) return And(*[~arg for arg in argset])", ">> 1``, i.e., 0, which has a truth value of", "sentence in CNF. Examples ======== >>> distribute_and_over_or(Or(a, And(Not(b), Not(c)))) (a", "True and the rest are False. Examples ======== >>> Xor(True,", "a == true: return b elif a == false: return", "assumptions system uses a three-valued logic (``True``, ``False``, ``None``), whereas", "if simplify: if isinstance(arg, cls): arg = arg.args else: arg", "def _to_nnf(cls, *args, **kwargs): simplify = kwargs.get('simplify', True) argset =", "**options): from ..core.relational import Relational args = [sympify(arg, strict=True) for", "functions that satisfy the conditions. Examples ======== >>> minterms =", "a), And(Not(c), d), And(b, c)) >>> bool_map(eq1, eq2) ((x &", "Equality(*arg.args) if isinstance(arg, StrictLessThan): return GreaterThan(*arg.args) if isinstance(arg, StrictGreaterThan): return", "``False``, \"``true == True``\" is ``True``, so if there is", "negated symbols (Not(x)), or Or (or an And) whose arguments", "[] for x in args: if isinstance(x, Number) or x", "terms of real sets. Examples ======== >>> Or(x > 2,", "``if greeting is True:`` Examples ======== >>> sympify(True) true >>>", "form ((A | ~B | ...) & (B | C", "using QM method. \"\"\" simplified_terms = [] todo = list(range(len(terms)))", "y x | y Notes ===== The ``|`` operator is", "true or B == false: return Or(Not(A), B) elif A", "and >> will work as expected on this class, whereas", "only to literals. If simplify is True, the result contains", "result contains no redundant clauses. Examples ======== >>> to_nnf(Not((~a &", "logic operators and relationals in terms of real sets. 
Examples", ">>> Xor(True, False, True, False) false >>> x ^ y", "(3, minterm[i]): return False return True def _rem_redundancy(l1, terms): \"\"\"", "Not(z))). Basic.match is not robust enough (see issue sympy/sympy#4835) so", "(a | ~b | (a & ~b)) & (b |", "~a) & (c | ~b) \"\"\" return to_nnf(expr) def is_literal(expr):", "Or(w, Not(z))). Basic.match is not robust enough (see issue sympy/sympy#4835)", "[] todo = list(range(len(terms))) for i, ti in enumerate(terms[:-1]): for", "for And, Or, Not, etc. \"\"\" is_Boolean = True def", "& ~b) | (b & c) | (~b & c),", "The ``&`` operator is provided as a convenience, but note", "args(self): return tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args = [] for", "return And(self, other) __rand__ = __and__ def __or__(self, other): \"\"\"Overloading", "Parameters ========== expr : string or boolean expression form :", "False) continue if x.is_Relational: c = x.canonical if c in", "| ...). If simplify is True, the expr is evaluated", "to its simplest DNF form. Examples ======== >>> to_dnf(b &", "from ..core.sympify import converter, sympify from ..utilities import ordered class", "c, z: d}) >>> eq = And(Xor(a, b), c, And(c,", "return arg.args[0] # Simplify Relational objects. if isinstance(arg, Equality): return", "obj._argset = frozenset(argset) return obj @property # type: ignore[misc] @cacheit", "0, 1], [0, 0, 1, 1], [0, 1, 1, 1],", "def to_nnf(self, simplify=True): a, b = self.args return Or._to_nnf(~a, b,", "Python, which is bitwise or. Hence, ``Or(a, b)`` and ``a", "[] for i in range(0, len(self.args)+1, 2): for neg in", ":py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples ======== >>> sympify(False) false >>> false >> false", "[expr] while stack: expr = stack.pop() if expr.func in (And,", "in argset else argset.add(true) for a, b in remove: argset.remove(a)", "is returned. 
For example, And(x, y) is logically equivalent to", "using false instead of False is that shorthand boolean operations", "convert unless we have to if is_cnf(expr): return expr expr", "frozenset(argset) return obj @property # type: ignore[misc] @cacheit def args(self):", "via ``true``. This is the Diofant version of True, for", "x in (0, 1): newargs.append(True if x else False) continue", "of True. To avoid this issue, use the Diofant boolean", "if isinstance(arg, Xor): for a in arg.args: argset.remove(a) if a", "eq = Or(And(Not(y), a), And(Not(y), b), And(x, y)) >>> dict(_finger(eq))", "arg in info[0].args: if isinstance(arg, info[1]): conj = arg break", "[list(i) for i in minterms] dontcares = [list(i) for i", "__hash__(self): return hash(True) def as_set(self): \"\"\" Rewrite logic operators and", "they are all False. Returns False if any argument is", "i + 1] = None newterm = ti[:] newterm[index] =", "is a workaround that is valid for simplified boolean expressions.", "expr is a literal, else False. Examples ======== >>> is_literal(a)", "and c == true: return Not(a) def to_nnf(self, simplify=True): a,", "``True`` should be used and when ``true`` should be used", "~((x | y) & (~x | ~y)) Notes ===== The", "has only &, |, and ~ as logical operators. 
Examples", "The return type is an Or or And object in", "True in argset: argset.discard(True) return And(*argset) if False in argset:", "\"\"\" Convert a propositional logical sentence s to conjunctive normal", "immediately if any of them are True, and False if", "& (b | ~a | (b & ~a)) \"\"\" expr", "and ``<<`` operators are provided as a convenience, but note", "Examples ======== >>> is_nnf(a & b | ~c) True >>>", "false: return a elif b == false and c ==", "Intersection(*[arg.as_set() for arg in self.args]) else: raise NotImplementedError('Sorry, And.as_set has", "y) | (t & ~y) | (z & ~y), {t:", "[] for i in range(1, len(args)+1, 2): for neg in", "provided as a convenience, but note that its use here", "= list(t) if expr.xreplace(dict(zip(variables, t))): truthtable.append(t) if deep: from ..simplify", "``True`` and ``False``. Aside from not satisfying the above rule", "'cnf' or form is None: # pragma: no branch return", "\"\"\" Returns True if the given formulas have the same", "Xor(self, other) __rxor__ = __xor__ def equals(self, other, failing_expression=False): \"\"\"", "the logic module will return this class when they evaluate", "Normal Form. A logical expression is in Negation Normal Form", "It evaluates its arguments in order, giving True immediately if", "# of times it appeared as a Not(Symbol) in an", "then else clause. ITE(A, B, C) evaluates and returns the", "for the mapping {x: a, y:b} or {x: b, y:a}.", "eliminate_implications(expr) return distribute_and_over_or(expr) def to_dnf(expr, simplify=False): \"\"\" Convert a propositional", ": boolean (default True) indicates whether to recursively simplify any", "important thing to remember is that ``sympify(True)`` returns ``true``. 
This", "some confusion as to when ``True`` should be used and", "true nargs = None @classmethod def _new_args_filter(cls, args): newargs =", "if len(self.free_symbols) == 1: return Intersection(*[arg.as_set() for arg in self.args])", "Implies(True, True) true >>> Implies(False, True) true >>> x >>", "the truth table has been sufficiently simplified, use the prime", "when they evaluate to true. Notes ===== There is liable", "& b`` will return different things if ``a`` and ``b``", "start 0 -> False remove = [] for i, (r,", "isinstance(arg, Xor): for a in arg.args: argset.remove(a) if a in", "if a is not conj]) return info[1](*list(map(_distribute, ((info[2](c, rest), info[1],", "0] \"\"\" if len(self.free_symbols) == 1: return self.args[0].as_set().complement(S.Reals) else: raise", "behaviour for some correspondence between the variables of each. If", "in expr.args: if cls.is_Atom: continue if isinstance(cls, Not): if not", "simplifies a boolean function to its simplified version in SOP", "makes the two expressions bool1 and bool2 represent the same", "in (And, Or): if simplified: args = expr.args for arg", ">>> ITE(Or(True, False), And(True, True), Xor(True, True)) true >>> ITE(x,", "is returned; if None, the answer is returned according to", "for i, m in enumerate(minterm): if m == 0: temp.append(Not(variables[i]))", "real sets. Examples ======== >>> false.as_set() EmptySet() \"\"\" from ..sets", "_distribute((expr, And, Or)) def distribute_or_over_and(expr): \"\"\" Given a sentence s", "disjunctions of literals, return an equivalent sentence in DNF. 
Note", "essential essential.append(z) break return essential def SOPform(variables, minterms, dontcares=None): \"\"\"", "SOPform function uses simplified_pairs and a redundant group- eliminating algorithm", "enumerate(terms[:-1]): for j_i, tj in enumerate(terms[(i + 1):]): index =", "self.args[0] func, args = expr.func, expr.args if func == And:", "simplify=True): args = [] for i in range(0, len(self.args)+1, 2):", "then it must necessarily be ``true`` instead of ``True``, as", "= [] for y in l1: if _compare_term(x, y): temporary.append(y)", "the result contains no redundant clauses. Examples ======== >>> to_nnf(Not((~a", "as elements of ``.args`` must be ``Basic``. On the other", "======== >>> distribute_and_over_or(Or(a, And(Not(b), Not(c)))) (a | ~b) & (a", "different if ``a`` and ``b`` are integers. >>> Xor(x, y).subs({y:", "of times it appeared as a Not(Symbol) in an And", "b) | (a & ~b) | (b & c) |", "in variables] if minterms == []: return false minterms =", "None: expr = sympify(expr) if not isinstance(expr, BooleanFunction): return expr", "(like the inability to import the class directly from this", "to just use ``if x`` instead of ``if x is", "is not None]]) return simplified_terms def _compare_term(minterm, term): \"\"\" Return", "checks if result contains no redundant clauses. Examples ======== >>>", "arguments are True and the rest are False. Returns False", "{2}] \"\"\" symbols = dict(zip(symbols, range(1, len(symbols) + 1))) def", "in remove: argset.remove(a) argset.remove(b) if len(argset) == 0: return false", "class ITE(BooleanFunction): \"\"\" If then else clause. ITE(A, B, C)", "True >>> is_cnf((a & b) | c) False \"\"\" return", "__and__(self, other): \"\"\"Overloading for & operator.\"\"\" return And(self, other) __rand__", "nargs = None @classmethod def _new_args_filter(cls, args): newargs = []", "arguments are True. 
Examples ======== >>> Nand(False, True) true >>>", "a.is_Not: d[a.args[0]][1] += 1 else: o = len(a.args) + sum(isinstance(ai,", "expr = sympify(expr) # Special case of an Atom if", "implemented for multivariate' ' expressions') class Or(LatticeOp, BooleanFunction): \"\"\" Logical", "been' ' implemented for mutivariate' ' expressions') def to_nnf(self, simplify=True):", "2, x < -2).as_set() [-oo, -2) U (2, oo] \"\"\"", "instead of True is that shorthand boolean operations like ~", "subclass from ``int``, ``~True`` is the same as ``~1`` which", "or len(f1[k]) != len(f2[k]): return for i, x in enumerate(f1[k]):", "else: argset.add(arg) rel = [(r, r.canonical, (~r).canonical) for r in", ">>> ~true false >>> ~True -2 >>> Or(True, False) true", "boolean types ``true`` and ``false``. >>> ~True -2 >>> ~true", "Nand(True, True) false >>> Nand(x, y) ~(x & y) \"\"\"", "__rxor__ = __xor__ def equals(self, other, failing_expression=False): \"\"\" Returns True", "to_nnf(Not((~a & ~b) | (c & d))) (a | b)", "if any(r == nc for r in rel): return [false]", "True >>> is_cnf(a & b & c) True >>> is_cnf((a", "def eval(cls, *args): return Not(And(*args)) class Nor(BooleanFunction): \"\"\" Logical NOR", "~b, simplify=simplify) if func == Equivalent: return And._to_nnf(Or(*args), Or(*[~arg for", ">>> function1 = SOPform([x, z, y], [[1, 0, 1], [0,", "..sets import Intersection if len(self.free_symbols) == 1: return Intersection(*[arg.as_set() for", "= self.args return Or._to_nnf(~a, b, simplify=simplify) class Equivalent(BooleanFunction): \"\"\" Equivalence", "as a symbolic representation of truth. For example, if the", "not necessarily unique, but they are canonical. 
Here, ``(t, z)``", "return True def eliminate_implications(expr): \"\"\" Change >>, <<, and Equivalent", "b | ~a >>> eliminate_implications(Equivalent(a, b)) (a | ~b) &", "an arbitrary symbolic ``Boolean``, like ``Or(x, y)`` or ``x >", "if cj == nc: odd = ~odd break elif cj", "c) >>> to_dnf((a & b) | (a & ~b) |", "__bool__(self): return False def __hash__(self): return hash(False) def as_set(self): \"\"\"", "is True and B is False. Returns True otherwise. Examples", "denested and is either an And (or an Or) whose", "to a simplified set of minterms with one less variable", "Product of Sums form. The variables must be given as", "SOPform(variables, minterms, dontcares=None): \"\"\" The SOPform function uses simplified_pairs and", "will work as expected on this class, whereas with False", "by an arbitrary symbolic ``Boolean``, like ``Or(x, y)`` or ``x", "within a BooleanFunction that is not a BooleanFunction itself. \"\"\"", "where the boolean is being used as a symbolic representation", "__bool__(self): return True def __hash__(self): return hash(True) def as_set(self): \"\"\"", "\"\"\" Logical NAND function. It evaluates its arguments in order,", "in essential essential.append(z) break return essential def SOPform(variables, minterms, dontcares=None):", "all True. Examples ======== >>> x & y x &", "1, len(rel)): rj, cj = rel[j][:2] if cj == nc:", "sufficiently simplified, use the prime implicant table method to recognize", ">> ~a) True >>> Not(And(a, b, c)).equals(And(Not(a), Not(b), Not(c))) False", "rest are False. Examples ======== >>> Xor(True, False) true >>>", "function1.is_Symbol: return {function1: function2} # get the fingerprint dictionaries f1", "is bitwise and. Hence, ``And(a, b)`` and ``a & b``", "boolean in question can be replaced by an arbitrary symbolic", "d)) >>> bool_map(eq, eq.subs({c: x})) (c & d & (a", "POS form. 
The return type is an Or or And", "\"\"\" from ..core.relational import Relational from .inference import satisfiable other", "import (Equality, GreaterThan, LessThan, StrictGreaterThan, StrictLessThan, Unequality) if isinstance(arg, Number)", "ValueError('form can be cnf or dnf only') def _finger(eq): \"\"\"", ">>> is_cnf(a | b | c) True >>> is_cnf(a &", "Examples ======== >>> is_literal(a) True >>> is_literal(~a) True >>> is_literal(a", "def bool_map(bool1, bool2): \"\"\" Return the simplified version of bool1,", "return EmptySet() true = BooleanTrue() false: BooleanFalse = BooleanFalse() #", "rel): return [true] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, Or) def as_set(self):", "the logic module. The primary advantage of using true instead", "1: if temporary[0] not in essential: essential.append(temporary[0]) for x in", "\"\"\"Return a list of the disjuncts in the sentence s.", "1], [1, 0, 1, 1], [1, 1, 1, 1]] >>>", "as expected on this class, whereas with True they act", "== True``\" is ``True``, so if there is any doubt", "or not an expression is in disjunctive normal form. Examples", "isinstance(lit, Not): if not lit.args[0].is_Atom: return False else: if not", "is None and len(truthtable) >= (2 ** (len(variables) - 1))):", "True # Special case of a single negation if isinstance(expr,", "And(x, Not(y), Or(w, Not(z))). Basic.match is not robust enough (see", "could be ``(a, d)`` or ``(d, a)``: >>> eq1 =", "& b & c) True >>> is_dnf((a & b) |", "Examples ======== >>> sympify(True) true >>> ~true false >>> ~True", "with True they act bitwise on 1. 
Functions in the", "== nc: odd = ~odd break elif cj == c:", ">> b) & (b >> a)) False \"\"\" expr =", "Not(a) in argset: return cls.zero argset.add(a) else: argset.add(arg) return cls(*argset)", "example, And(x, y) is logically equivalent to And(a, b) for", "false minterms = [list(i) for i in minterms] dontcares =", "be some confusion as to when ``True`` should be used", "it's variable form (for SOP). \"\"\" temp = [] for", "arguments are only symbols or negated symbols. For example, And(x,", "in neg else s for s in args] result.append(Or(*clause)) return", "lambda x: true if x else false class BooleanFunction(Application, Boolean):", "and Not(Symbol) as 2 ] >>> eq = Or(And(Not(y), a),", "in dontcares') old = None new = minterms + dontcares", "(``True``, ``False``, ``None``), whereas ``true`` and ``false`` represent a two-valued", "else: return not isinstance(expr, BooleanFunction) def to_int_repr(clauses, symbols): \"\"\" Takes", "======== >>> sympify(False) false >>> false >> false true >>>", "== c: return b elif b == true and c", "[0, 0, 1]]) >>> function2 = SOPform([a, b, c], [[1,", "obj._argset = _args return obj @property # type: ignore[misc] @cacheit", "rel.append((r, r.canonical, (~r).canonical)) remove = [] for i, (r, c,", "POSform function uses simplified_pairs and a redundant-group eliminating algorithm to", "temporary.append(y) if len(temporary) == 1: if temporary[0] not in essential:", "have to if is_cnf(expr): return expr expr = eliminate_implications(expr) return", "False. 
Examples ======== >>> Xor(True, False) true >>> Xor(True, True)", "stack.extend(expr.args) elif not is_literal(expr): return False return True def is_cnf(expr):", "And(self, other) __rand__ = __and__ def __or__(self, other): \"\"\"Overloading for", "(y | ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables =", "False) true See Also ======== BooleanTrue \"\"\" def __bool__(self): return", "return info[1](*list(map(_distribute, ((x, info[1], info[2]) for x in info[0].args)))) else:", "in expr.args: if isinstance(lit, Not): if not lit.args[0].is_Atom: return False", "newargs except ValueError: raise ValueError(f'{len(args)} operand(s) used for an Implies", "other) __ror__ = __or__ def __invert__(self): \"\"\"Overloading for ~ operator.\"\"\"", "of the form ((A | ~B | ...) & (B", "formulas have the same truth table. For two formulas to", "& ...). If simplify is True, the expr is evaluated", "or {x: b, y:a}. If no such mapping exists, return", "i, x in enumerate(term): if x not in (3, minterm[i]):", "def eval(cls, *args): try: newargs = [] for x in", "isinstance(arg, StrictLessThan): return GreaterThan(*arg.args) if isinstance(arg, StrictGreaterThan): return LessThan(*arg.args) if", "{a: a, b: b, c: d, d: x}) \"\"\" def", "for i in [_ for _ in todo if _", "for fi in f} for a in eq.args: if a.is_Symbol:", "so if there is any doubt over whether a function", "Xor): for a in arg.args: argset.remove(a) if a in argset", "args = [sympify(arg, strict=True) for arg in args] argset =", "terms of real sets. Examples ======== >>> And(x < 2,", "Xor: result = [] for i in range(1, len(args)+1, 2):", "a, y:b} or {x: b, y:a}. If no such mapping", "is True, checks if result contains no redundant clauses. 
Examples", "minterms: raise ValueError(f'{d} in minterms is also in dontcares') old", "\"product of sums\" or \"POS\" form) that gives the desired", "1 elif a.is_Not: d[a.args[0]][1] += 1 else: o = len(a.args)", "_is_form(expr, Or, And) def _is_form(expr, function1, function2): \"\"\"Test whether or", "self.args]) else: raise NotImplementedError('Sorry, Or.as_set has not yet been' '", "evaluated to its simplest CNF form. Examples ======== >>> to_cnf(~(a", "Singleton from ..core.sympify import converter, sympify from ..utilities import ordered", "Xor(x, y).subs({y: 0}) x \"\"\" def __new__(cls, *args, **kwargs): argset", "\"\"\"Overloading for & operator.\"\"\" return And(self, other) __rand__ = __and__", "0, 3): [a, b], (0, 0, 1, 2, 8): [y]}", "returns -1. \"\"\" index = -1 for x, (i, j)", "it appeared, counting Symbol as 1 and Not(Symbol) as 2", "to_dnf(expr, simplify=False): \"\"\" Convert a propositional logical sentence s to", "return And(*argset) if False in argset: argset.discard(False) return And(*[~arg for", "cls.zero argset.add(a) else: argset.add(arg) return cls(*argset) class And(LatticeOp, BooleanFunction): \"\"\"", "in various contexts throughout Diofant. An important thing to remember", "function2.__class__: return if len(function1.args) != len(function2.args): return if function1.is_Symbol: return", "the sentence s. Examples ======== >>> disjuncts(a | b) ==", "c), True) a | c \"\"\" expr = sympify(expr) if", "& b) | c) False >>> is_nnf((a >> b) &", "class, whereas with True they act bitwise on 1. Functions", ">>> dict(_finger(eq)) {(0, 0, 1, 0, 2): [x], (0, 0,", "the class and instance names the same causes some #", "a)``: >>> eq1 = Or(And(Not(y), t), And(Not(y), z), And(x, y))", "its arguments in order, giving True immediately if any of", "ValueError(f'{len(args)} operand(s) used for an Implies ' f'(pairs are required):", "pair of minterms differs by only one bit. 
If yes,", "obj).args: if isinstance(arg, Number) or arg in (True, False): if", "the result of C. Examples ======== >>> ITE(True, False, True)", "list(t) if (t not in minterms) and (t not in", "Examples ======== >>> Implies(True, False) false >>> Implies(False, False) true", "self) __rrshift__ = __lshift__ __rlshift__ = __rshift__ def __xor__(self, other):", "better to just use ``if x`` instead of ``if x", "if possible, to a simplified set of minterms with one", "\"\"\" Assign a 5-item fingerprint to each symbol in the", "Equivalent(x, And(x, True)) true \"\"\" def __new__(cls, *args, **options): from", "argset: argset.discard(False) return And(*[~arg for arg in argset]) _args =", "function. It evaluates its arguments in order, giving False immediately", "products\" or \"SOP\" form) that gives the desired outcome. If", ">>> Or(True, False) true See Also ======== BooleanFalse \"\"\" def", "= sympify(expr) if not isinstance(expr, BooleanFunction): return expr if simplify:", "..sets import Union if len(self.free_symbols) == 1: return Union(*[arg.as_set() for", "in args] result.append(Or(*clause)) return And._to_nnf(*result, simplify=simplify) if func == ITE:", "the conditions. Examples ======== >>> minterms = [[0, 0, 0,", "in argset: return cls.zero argset.add(a) else: argset.add(arg) return cls(*argset) class", "return Not(Xor(*argset)) else: obj._args = tuple(ordered(argset)) obj._argset = frozenset(argset) return", "= Or(And(Not(c), a), And(Not(c), d), And(b, c)) >>> bool_map(eq1, eq2)", "in minterms] dontcares = [list(i) for i in (dontcares or", "y, y], [x, y]) [{1, 2}, {2}] \"\"\" symbols =", "return tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args = [] for a,", "ti in enumerate(terms[:-1]): for j_i, tj in enumerate(terms[(i + 1):]):", "temp.append(Not(variables[i])) elif m == 0: temp.append(variables[i]) return Or(*temp) def _simplified_pairs(terms):", "otherwise. 
Examples ======== >>> Equivalent(False, False, False) true >>> Equivalent(True,", "Not(False) true >>> Not(And(True, False)) true >>> Not(Or(True, False)) false", "the assumptions system uses a three-valued logic (``True``, ``False``, ``None``),", "a in arg: if Not(a) in argset: return cls.zero argset.add(a)", "& ~y & ~z) >>> simplify_logic(_) ~x & ~y \"\"\"", "one bit. If yes, returns index, else returns -1. \"\"\"", "minterms == []: return false minterms = [list(i) for i", "false elif len(argset) == 1: return argset.pop() elif True in", "same causes some # major issues (like the inability to", "Notes ===== The ``&`` operator is provided as a convenience,", "and ``a & b`` will return different things if ``a``", "**kwargs): simplify = kwargs.get('simplify', True) argset = set() for arg", "output is NOT simplified. Examples ======== >>> distribute_or_over_and(And(Or(Not(a), b), c))", "redundant group- eliminating algorithm to convert the list of all", "NAND function. It evaluates its arguments in order, giving True", "def __invert__(self): \"\"\"Overloading for ~ operator.\"\"\" return Not(self) def __rshift__(self,", "to literals. If simplified is True, checks if result contains", "return hash(True) def as_set(self): \"\"\" Rewrite logic operators and relationals", "their use here is different from their normal use in", ">>> is_dnf(a & b & c) True >>> is_dnf((a &", "which is bitwise xor. In particular, ``a ^ b`` and", "if self.has(Relational) or other.has(Relational): raise NotImplementedError('handling of relationals') return self.atoms()", "1]] >>> dontcares = [[0, 0, 0, 0], [0, 0,", "terms using QM method. \"\"\" simplified_terms = [] todo =", "C. Examples ======== >>> ITE(True, False, True) false >>> ITE(Or(True,", "len(f1) != len(f2): return # assemble the match dictionary if", "type is an Or or And object in Diofant. 
Parameters", "len(symbols) + 1))) def append_symbol(arg, symbols): if isinstance(arg, Not): return", "return cls(*argset) class And(LatticeOp, BooleanFunction): \"\"\" Logical AND function. It", "the \"sum of products\" or \"SOP\" form) that gives the", "= ti[:] newterm[index] = 3 if newterm not in simplified_terms:", "False return True # Special case of a single negation", "into an integer representation. Examples ======== >>> to_int_repr([x | y,", "redundant-group eliminating algorithm to convert the list of all input", "# pragma: no branch if _compare_term(x, z): assert z not", "x \"\"\" def __new__(cls, *args, **kwargs): argset = set() obj", "all False. Examples ======== >>> x | y x |", "try: newargs = [] for x in args: if isinstance(x,", "Diofant version of True, for use in the logic module.", "simplify=simplify) class Nand(BooleanFunction): \"\"\" Logical NAND function. It evaluates its", "return if len(function1.args) != len(function2.args): return if function1.is_Symbol: return {function1:", "its normal use in Python, which is bitwise not. In", "temp.append(Not(variables[i])) elif m == 1: temp.append(variables[i]) return And(*temp) def _convert_to_varsPOS(maxterm,", "~ operator.\"\"\" return Not(self) def __rshift__(self, other): \"\"\"Overloading for >>", "z], minterms, dontcares) z & (y | ~t) References ==========", "False if they are all True. Returns True if any", "0, 1, 1], [0, 1, 1, 1], ... [1, 0,", "equation: [ # of times it appeared as a Symbol,", "function to its simplified version in SOP or POS form.", "of any expression, then it must necessarily be ``true`` instead", "(t not in minterms) and (t not in dontcares): maxterms.append(t)", "def _eval_derivative(self, x): return self.func(self.args[0], *[a.diff(x) for a in self.args[1:]])", "Implies(other, self) __rrshift__ = __lshift__ __rlshift__ = __rshift__ def __xor__(self,", "symbolic representation of truth. For example, if the object ends", "be given as the first argument. 
Return a logical Or", "into the smallest Sum of Products form. The variables must", "True is that shorthand boolean operations like ~ and >>", "is_literal(self): return self expr = self.args[0] func, args = expr.func,", "| ~a) & (b | c), False) True >>> is_nnf(Not(a", "rel = [] for x in args: if isinstance(x, Number)", "given as the first argument. Return a logical And function", "for i, m in enumerate(maxterm): if m == 1: temp.append(Not(variables[i]))", "if not isinstance(expr, function1): return False for cls in expr.args:", "======== >>> Nand(False, True) true >>> Nand(True, True) false >>>", "values to ``True`` or ``False`` using ``==``. * Yes: ``if", "two simplified boolean expressions if possible. By \"simplified\" we mean", "``True >> True`` will be the same as ``1 >>", "b), And(x, y)) >>> dict(_finger(eq)) {(0, 0, 1, 0, 2):", "tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args = [] for a, b", "False \"\"\" expr = sympify(expr) if is_literal(expr): return True stack", "1))) def append_symbol(arg, symbols): if isinstance(arg, Not): return -symbols[arg.args[0]] else:", "eq.args: if a.is_Symbol: d[a][0] += 1 elif a.is_Not: d[a.args[0]][1] +=", "not conj]) return info[1](*list(map(_distribute, ((info[2](c, rest), info[1], info[2]) for c", "<< x Implies(x, y) Notes ===== The ``>>`` and ``<<``", "..simplify import simplify variables = [simplify(v) for v in variables]", "thumb, the assumptions system uses a three-valued logic (``True``, ``False``,", "first argument. Return a logical And function (i.e., the \"product", "deep=True): \"\"\" This function simplifies a boolean function to its", "algorithm to convert the list of all input combos that", "elif True in argset: argset.remove(True) return Not(Xor(*argset)) else: obj._args =", "expr is in Negation Normal Form. A logical expression is", "((info[2](c, rest), info[1], info[2]) for c in conj.args)))) elif isinstance(info[0],", "have unique fingerprints, but a and b do not. 
\"\"\"", "That is, of the form ((A & ~B & ...)", "@property # type: ignore[misc] @cacheit def args(self): return tuple(ordered(self._argset)) def", "1: return Intersection(*[arg.as_set() for arg in self.args]) else: raise NotImplementedError('Sorry,", "simplified_terms.append(newterm) simplified_terms.extend( [terms[i] for i in [_ for _ in", "integers. In particular, since Python considers ``True`` and ``False`` to", "| c)) False \"\"\" return _is_form(expr, Or, And) def _is_form(expr,", "in SOP or POS form. The return type is an", "a), And(Not(y), b), And(x, y)) >>> dict(_finger(eq)) {(0, 0, 1,", "of them are False, and False if they are all", "\"\"\" from ..sets import EmptySet return EmptySet() true = BooleanTrue()", "===== The ``^`` operator is provided as a convenience, but", "in minterms: raise ValueError(f'{d} in minterms is also in dontcares')", "c) True >>> is_cnf(a & b & c) True >>>", "equality rather than mathematical, so it should return ``True``. The", "hash(True) def as_set(self): \"\"\" Rewrite logic operators and relationals in", "* No: ``if greeting == True:`` * Worse: ``if greeting", "are False, and False if they are all True. Returns", "if a pair of minterms differs by only one bit.", "s consisting of conjunctions and disjunctions of literals, return an", "string ('cnf' or 'dnf') or None (default). If 'cnf' or", "return -1 return index def _convert_to_varsSOP(minterm, variables): \"\"\" Converts a", "Number) or x in (0, 1): newargs.append(True if x else", "simplify=simplify) if func == Implies: a, b = args return", "*args, **kwargs) for arg in super(Xor, obj).args: if isinstance(arg, Number)", "return type is an Or or And object in Diofant.", "DNF. Note that the output is NOT simplified. 
Examples ========", ">= (2 ** (len(variables) - 1))): return SOPform(variables, truthtable) elif", "_finger(function2) # more quick checks if len(f1) != len(f2): return", "d & (a | b) & (~a | ~b), {a:", "in enumerate(rel): for j in range(i + 1, len(rel)): rj,", "a workaround that is valid for simplified boolean expressions. \"\"\"", "matchdict = {} for k in f1: if k not", "= [~s if s in neg else s for s", "False. Returns True otherwise. Examples ======== >>> Implies(True, False) false", "whether or not an expression is of the required form.\"\"\"", "(t & ~y) | (z & ~y), {t: a, x:", "to true. Notes ===== There is liable to be some", "if c in rel: continue nc = (~c).canonical if any(r", "import EmptySet return EmptySet() true = BooleanTrue() false: BooleanFalse =", "True if an odd number of the arguments are True", "this # file). S.true = true S.false = false converter[bool]", "if minterms == []: return false minterms = [list(i) for", "of literals, return an equivalent sentence in DNF. Note that", "== B.canonical: return B else: return Expr.__new__(cls, *args) def to_nnf(self,", "return an equivalent sentence in DNF. Note that the output", "And(x < 2, x > -2).as_set() (-2, 2) \"\"\" from", "[1, 0, 1, 1], [1, 1, 1, 1]] >>> dontcares", "arg = arg.to_nnf(simplify) if simplify: if isinstance(arg, cls): arg =", "import Application from ..core.numbers import Number from ..core.operations import LatticeOp", "= list(range(len(terms))) for i, ti in enumerate(terms[:-1]): for j_i, tj", "(~r).canonical) for r in argset if r.is_Relational] odd = False", "b) | c) False \"\"\" return _is_form(expr, And, Or) def", "======== BooleanTrue \"\"\" def __bool__(self): return False def __hash__(self): return", "Returns True if all arguments are False. 
Examples ======== >>>", "simplify=True): args = [] for a, b in zip(self.args, self.args[1:]):", "= [] for a, b in zip(self.args, self.args[1:]): args.append(Or(~a, b))", "B = newargs except ValueError: raise ValueError(f'{len(args)} operand(s) used for", "1, 1], [0, 1, 1, 1], ... [1, 0, 1,", "``b`` are integers. >>> Xor(x, y).subs({y: 0}) x \"\"\" def", "return not isinstance(expr, BooleanFunction) def to_int_repr(clauses, symbols): \"\"\" Takes clauses", "are inputs that can be ignored, pass them as a", "False) else: newargs.append(x) A, B = newargs except ValueError: raise", "complimentary pairs odd? start 0 -> False remove = []", ">>> Equivalent(x, And(x, True)) true \"\"\" def __new__(cls, *args, **options):", "And (or an Or) whose arguments are either symbols (x),", "rel = [] for r in argset: if isinstance(r, Relational):", "the arguments are logically equivalent. Returns False otherwise. Examples ========", "obj = super().__new__(cls, *args, **kwargs) for arg in super(Xor, obj).args:", "= newargs except ValueError: raise ValueError(f'{len(args)} operand(s) used for an", "in (True, False): return false if arg else true if", "bitwise xor. In particular, ``a ^ b`` and ``Xor(a, b)``", "~d) >>> to_nnf(Equivalent(a >> b, b >> a)) (a |", "simplify: return simplify_logic(expr, 'cnf', True) # Don't convert unless we", "to when ``True`` should be used and when ``true`` should", "[x], (0, 0, 1, 0, 3): [a, b], (0, 0,", "object in Diofant. Parameters ========== expr : string or boolean", "self.args return Or._to_nnf(~a, b, simplify=simplify) class Equivalent(BooleanFunction): \"\"\" Equivalence relation.", "in eq.args: if a.is_Symbol: d[a][0] += 1 elif a.is_Not: d[a.args[0]][1]", "that lives in a boolean space. This is used as", "True. Examples ======== >>> Not(True) false >>> Not(False) true >>>", "if ``a`` and ``b`` are integers. 
>>> Or(x, y).subs({x: 0})", ">>> Nor(True, True) false >>> Nor(False, True) false >>> Nor(False,", "+= 1 elif a.is_Not: d[a.args[0]][1] += 1 else: o =", "| ~b), {a: a, b: b, c: d, d: x})", "False if they are all False. Examples ======== >>> x", "matchdict[x] = f2[k][i] return matchdict if matchdict else None a", "as a Symbol in an And or Or, # of", "(i.e., the \"product of sums\" or \"POS\" form) that gives", "\"\"\" if not isinstance(expr, BooleanFunction): return {expr} return set().union(*(_find_predicates(i) for", "ordered(d.items()): inv[tuple(v)].append(k) return inv def bool_map(bool1, bool2): \"\"\" Return the", "Convert a propositional logical sentence s to conjunctive normal form.", "((A | ~B | ...) & (B | C |", "Examples ======== >>> false.as_set() EmptySet() \"\"\" from ..sets import EmptySet", "~b) & (a | ~c) \"\"\" return _distribute((expr, And, Or))", "to be some confusion as to when ``True`` should be", "which is bitwise or. Hence, ``Or(a, b)`` and ``a |", ">> b, b >> a)) (a | ~b | (a", "other): \"\"\"Overloading for | operator.\"\"\" return Or(self, other) __ror__ =", "~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables = [sympify(v) for", "eval(cls, *args): try: a, b, c = args except ValueError:", "of them are True, and True if they are all", "logically equivalent. Returns False otherwise. Examples ======== >>> Equivalent(False, False,", "normal use in Python, which is bitwise and. Hence, ``And(a,", "with False they act bitwise on 0. Functions in the", "is returned according to the form with fewest args (in", "if len(self.free_symbols) == 1: return self.args[0].as_set().complement(S.Reals) else: raise NotImplementedError('Sorry, Not.as_set", "to conjunctive normal form. That is, of the form ((A", "\"``true == True is True``.\" While \"``true is True``\" is", "this sort exist, one of them is returned. 
For example,", "``Or(a, b)`` and ``a | b`` will return different things", "if isinstance(arg, info[1]): conj = arg break else: return info[0]", "cls.args: if isinstance(lit, Not): if not lit.args[0].is_Atom: return False else:", "or boolean expression form : string ('cnf' or 'dnf') or", "in expression') class Xor(BooleanFunction): \"\"\" Logical XOR (exclusive OR) function.", "true = BooleanTrue() false: BooleanFalse = BooleanFalse() # We want", "if func == ITE: a, b, c = args return", "expressions if possible. By \"simplified\" we mean that a function", "(Not(x)), or Or (or an And) whose arguments are only", "r.canonical, (~r).canonical)) remove = [] for i, (r, c, nc)", ">>> y << x Implies(x, y) Notes ===== The ``>>``", "function uses simplified_pairs and a redundant-group eliminating algorithm to convert", "of literals, return an equivalent sentence in CNF. Examples ========", "[True, False]: # Includes 0, 1 argset.discard(x) argset.add(True if x", "False): return false if arg else true if arg.is_Not: return", "or None (default). If 'cnf' or 'dnf', the simplest expression", "correspondence between the variables of each. If more than one", "propositional logical sentence s to disjunctive normal form. That is,", "j) in enumerate(zip(minterm1, minterm2)): if i != j: if index", "shifts. Hence, ``Implies(a, b)`` and ``a >> b`` will return", "be converted to ``true`` when necessary, similar to how you", "_args) obj._argset = _args return obj @property # type: ignore[misc]", "is_nnf((a >> b) & (b >> a)) False \"\"\" expr", "``&`` operator is provided as a convenience, but note that", "if len(argset) == 0: return false elif len(argset) == 1:", "a Not(symbol), # of times it appeared as a Symbol", "Don't convert unless we have to if is_cnf(expr): return expr", "arg in (True, False): if not arg: continue else: arg", "to be integers, ``True >> True`` will be the same", "definitions. 
Some useful methods def conjuncts(expr): \"\"\"Return a list of", "an expression is in conjunctive normal form. Examples ======== >>>", "is different from its normal use in Python, which is", "Returns False if the statement is True. Examples ======== >>>", "y \"\"\" zero = true identity = false @classmethod def", "NOT simplified. Examples ======== >>> distribute_or_over_and(And(Or(Not(a), b), c)) (b &", "the list of all input combinations that generate '1' (the", "normal form. That is, of the form ((A | ~B", "operators. Examples ======== >>> eliminate_implications(Implies(a, b)) b | ~a >>>", "isinstance(expr, function2): for lit in expr.args: if isinstance(lit, Not): if", "bitwise or. Hence, ``Or(a, b)`` and ``a | b`` will", "If simplified is True, checks if result contains no redundant", "return self expr = self.args[0] func, args = expr.func, expr.args", "= info[2](*[a for a in info[0].args if a is not", "(the minterms) into the smallest Sum of Products form. The", "> 1``, use ``true``. Otherwise, use ``True``\". In other words,", "relationals') return self.atoms() == other.atoms() and \\ not satisfiable(Not(Equivalent(self, other)))", "they are all True. Returns True if any of the", "in doubt, use ``True``. \"``true == True is True``.\" While", "False, True) false >>> ITE(Or(True, False), And(True, True), Xor(True, True))", "def __lshift__(self, other): \"\"\"Overloading for << operator.\"\"\" return Implies(other, self)", "<gh_stars>0 \"\"\" Boolean algebra module for Diofant. \"\"\" from collections", "logic (``True``, ``False``, ``None``), whereas ``true`` and ``false`` represent a", "Sum of Products form. 
The variables must be given as", "in ordered(d.items()): inv[tuple(v)].append(k) return inv def bool_map(bool1, bool2): \"\"\" Return", "conj = arg break else: return info[0] rest = info[2](*[a", "else: if not lit.is_Atom: return False return True def eliminate_implications(expr):", "operations like ~ and >> will work as expected on", "``Or(x, y)`` or ``x > 1``, use ``true``. Otherwise, use", "1]] >>> SOPform([t, x, y, z], minterms, dontcares) (y &", "eliminate_implications(Equivalent(a, b)) (a | ~b) & (b | ~a) >>>", "elif not isinstance(cls, function2): return False for lit in cls.args:", "GreaterThan, LessThan, StrictGreaterThan, StrictLessThan, Unequality) if isinstance(arg, Number) or arg", "z not in essential essential.append(z) break return essential def SOPform(variables,", "of the arguments are logically equivalent. Returns False otherwise. Examples", "return Or(Not(A), B) elif A == B: return true elif", "the prime implicant table method to recognize and eliminate redundant", "A and B are both True or both False. Returns", "formulas to be equal they must have the same literals.", "the Diofant version of False, for use in the logic", "c in clauses] def _check_pair(minterm1, minterm2): \"\"\" Checks if a", "args = expr.args for arg in args: if Not(arg) in", "& y x & y Notes ===== The ``&`` operator", "\"simplified\" we mean that a function has been denested and", "z) >>> simplify_logic(b) ~x & ~y >>> sympify(b) (z &", "Nand(BooleanFunction): \"\"\" Logical NAND function. It evaluates its arguments in", "x else False) continue if x.is_Relational: c = x.canonical if", "a convenience, but note that its use here is different", "within the input. 
Examples ======== >>> b = (~x &", "2, 8): [y]} So y and x have unique fingerprints,", "o else: d[ai.args[0]][3] += 1 d[ai.args[0]][-1] += o inv =", "======== >>> Xor(True, False) true >>> Xor(True, True) false >>>", "'dnf' or \\ (form is None and len(truthtable) >= (2", "simplify_logic(bool1) b = simplify_logic(bool2) m = match(a, b) if m:", "simplify is True, the result contains no redundant clauses. Examples", "return for i, x in enumerate(f1[k]): matchdict[x] = f2[k][i] return", "the Diofant boolean types ``true`` and ``false``. >>> ~True -2", "return # assemble the match dictionary if possible matchdict =", "Or(And(Not(c), a), And(Not(c), d), And(b, c)) >>> bool_map(eq1, eq2) ((x", "the rest are False. Examples ======== >>> Xor(True, False) true", "(0, 0, 1, 0, 3): [a, b], (0, 0, 1,", "x`` instead of ``if x is True``. To quote PEP", "# get the fingerprint dictionaries f1 = _finger(function1) f2 =", "false instead of False is that shorthand boolean operations like", "argset.add(arg) rel = [(r, r.canonical, (~r).canonical) for r in argset", "Diofant boolean types ``true`` and ``false``. 
>>> ~True -2 >>>", "break elif cj == c: break else: continue remove.append((r, rj))", "of True, a singleton that can be accessed via ``true``.", "def simplify_logic(expr, form=None, deep=True): \"\"\" This function simplifies a boolean", "evaluates its arguments in order, giving True immediately if any", "todo[i] = todo[j_i + i + 1] = None newterm", "Special case of a single negation if isinstance(expr, Not): if", "len(self.free_symbols) == 1: return self.args[0].as_set().complement(S.Reals) else: raise NotImplementedError('Sorry, Not.as_set has", "module will return this class when they evaluate to false.", "``True`` and it will automatically be converted to ``true`` when", "i, m in enumerate(minterm): if m == 0: temp.append(Not(variables[i])) elif", "~a) & (b | c)) False >>> is_nnf((a | ~a)", "def canonical(self): return self def __int__(self): return int(bool(self)) class BooleanTrue(BooleanAtom,", "``true`` should be used in various contexts throughout Diofant. An", ">>> eliminate_implications(Equivalent(a, b, c)) (a | ~c) & (b |", "in the terms using QM method. \"\"\" simplified_terms = []", "k not in f2 or len(f1[k]) != len(f2[k]): return for", "Or, And)) def _distribute(info): \"\"\"Distributes info[1] over info[2] with respect", "\"\"\"Helper to find logical predicates in BooleanFunctions. A logical predicate", "them are False, and True if they are all True.", "' expressions') class Or(LatticeOp, BooleanFunction): \"\"\" Logical OR function It", "c in conj.args)))) elif isinstance(info[0], info[1]): return info[1](*list(map(_distribute, ((x, info[1],", "info[0].\"\"\" if isinstance(info[0], info[2]): for arg in info[0].args: if isinstance(arg,", "Or(*[_convert_to_varsSOP(x, variables) for x in essential]) def POSform(variables, minterms, dontcares=None):", "# end class definitions. Some useful methods def conjuncts(expr): \"\"\"Return", "(B & C & ...) | ...). 
If simplify is", "as a Not(Symbol) in an And or Or, sum of", "len(a.args) + sum(isinstance(ai, Not) for ai in a.args) for ai", "is_dnf(expr): return expr expr = eliminate_implications(expr) return distribute_or_over_and(expr) def is_nnf(expr,", "False for lit in cls.args: if isinstance(lit, Not): if not", "if true in argset else argset.add(true) for a, b in", "rather than mathematical, so it should return ``True``. The assumptions", "not cls.args[0].is_Atom: return False elif not isinstance(cls, function2): return False", "bitwise and. Hence, ``And(a, b)`` and ``a & b`` will", "[y]} So y and x have unique fingerprints, but a", "else: return Expr.__new__(cls, *args) def to_nnf(self, simplify=True): a, b =", "not yet been' ' implemented for multivariate' ' expressions') class", "true >> false false \"\"\" @classmethod def eval(cls, *args): try:", "``Not(a)`` will be different if ``a`` is an integer. Furthermore,", "use ``if x`` instead of ``if x is True``. To", "Diofant. \"\"\" from collections import defaultdict from itertools import combinations,", "in enumerate(terms[:-1]): for j_i, tj in enumerate(terms[(i + 1):]): index", "if True in argset: argset.discard(True) return And(*argset) if False in", "rj)) break for a, b in remove: argset.remove(a) argset.remove(b) argset.add(True)", "in a.args) for ai in a.args: if ai.is_Symbol: d[ai][2] +=", "return to_nnf(expr) def is_literal(expr): \"\"\" Returns True if expr is", "``.args`` of any expression, then it must necessarily be ``true``", ">>> And(x < 2, x > -2).as_set() (-2, 2) \"\"\"", "if not arg: continue else: arg = true if isinstance(arg,", "= todo[j_i + i + 1] = None newterm =", "converted to ``true`` when necessary, similar to how you can", "expr = sympify(expr) if is_nnf(expr, simplify): return expr return expr.to_nnf(simplify)", "that is valid for simplified boolean expressions. \"\"\" # do", "Assign a 5-item fingerprint to each symbol in the equation:", "the terms using QM method. 
\"\"\" simplified_terms = [] todo", "if not lit.args[0].is_Atom: return False else: if not lit.is_Atom: return", "an Implies ' f'(pairs are required): {args!s}') if A ==", "if an odd number of the arguments are True and", "term. Used for recognizing prime implicants. \"\"\" for i, x", "r in argset: if isinstance(r, Relational): rel.append((r, r.canonical, (~r).canonical)) remove", "[sympify(v) for v in variables] if minterms == []: return", "for x in essential]) def POSform(variables, minterms, dontcares=None): \"\"\" The", "will return ``true`` or ``True``, just use ``==`` instead of", "BooleanFunction): return expr if simplify: return simplify_logic(expr, 'cnf', True) #", "be ``Basic``. On the other hand, ``==`` is not a", "def to_nnf(expr, simplify=True): \"\"\" Converts expr to Negation Normal Form.", "def __xor__(self, other): return Xor(self, other) __rxor__ = __xor__ def", "file). S.true = true S.false = false converter[bool] = lambda", "It evaluates its arguments in order, giving False immediately if", "k, v in ordered(d.items()): inv[tuple(v)].append(k) return inv def bool_map(bool1, bool2):", "as to when ``True`` should be used and when ``true``", "= maxterms + dontcares while new != old: old =", "whereas with True they act bitwise on 1. Functions in", "def is_dnf(expr): \"\"\" Test whether or not an expression is", "Equivalent into &, |, and ~. That is, return an", "v in ordered(d.items()): inv[tuple(v)].append(k) return inv def bool_map(bool1, bool2): \"\"\"", "*args, **options): from ..core.relational import Relational args = [sympify(arg, strict=True)", "from ..core.expr import Expr from ..core.function import Application from ..core.numbers", "from their normal use in Python, which is bit shifts.", "of a function from binary to it's variable form (for", "@classmethod def eval(cls, *args): try: a, b, c = args", "import ordered class Boolean(Expr): \"\"\"A boolean object is an object", "which is bitwise not. 
In particular, ``~a`` and ``Not(a)`` will", "c) False \"\"\" return _is_form(expr, And, Or) def is_dnf(expr): \"\"\"", "(a & ~b)) & (b | ~a | (b &", "module will return this class when they evaluate to true.", "== 1: return Intersection(*[arg.as_set() for arg in self.args]) else: raise", "x < -2).as_set() [-oo, -2) U (2, oo] \"\"\" from", "the mapping {x: a, y:b} or {x: b, y:a}. If", "Examples ======== >>> distribute_and_over_or(Or(a, And(Not(b), Not(c)))) (a | ~b) &", "not isinstance(expr, BooleanFunction): return expr if simplify: return simplify_logic(expr, 'dnf',", "shorthand boolean operations like ~ and >> will work as", "that their use here is different from their normal use", "' implemented for multivariate' ' expressions') class Not(BooleanFunction): \"\"\" Logical", "== false: return c elif b == c: return b", "return simplify_logic(expr, 'dnf', True) # Don't convert unless we have", "(the minterms) into the smallest Product of Sums form. The", "0, 1, 0, 3): [a, b], (0, 0, 1, 2,", "__rshift__(self, other): \"\"\"Overloading for >> operator.\"\"\" return Implies(self, other) def", "order, giving True immediately if any of them are True,", "NOR function. It evaluates its arguments in order, giving False", "= false converter[bool] = lambda x: true if x else", "the mapping of variables that makes the two expressions bool1", "if ``a`` and ``b`` are integers. >>> And(x, y).subs({x: 1})", "the variables of each. 
If more than one mappings of", "new = _simplified_pairs(old) essential = _rem_redundancy(new, minterms) return Or(*[_convert_to_varsSOP(x, variables)", "symbolic ``Boolean``, like ``Or(x, y)`` or ``x > 1``, use", "== 'cnf' or form is None: # pragma: no branch", "if func == And: return Or._to_nnf(*[~arg for arg in args],", "sympify(expr) if not isinstance(expr, BooleanFunction): return expr if simplify: return", "Relational): rel.append((r, r.canonical, (~r).canonical)) remove = [] for i, (r,", "simplify: return simplify_logic(expr, 'dnf', True) # Don't convert unless we", "false >>> Implies(False, False) true >>> Implies(True, True) true >>>", "\"\"\" f = eq.free_symbols d = {fi: [0] * 5", "variables) for x in essential]) def _find_predicates(expr): \"\"\"Helper to find", "real sets. Examples ======== >>> true.as_set() UniversalSet() \"\"\" return S.UniversalSet", "len(function1.args) != len(function2.args): return if function1.is_Symbol: return {function1: function2} #", "class Xor(BooleanFunction): \"\"\" Logical XOR (exclusive OR) function. Returns True", "A is true else it returns the result of C.", "simplify=simplify) class Equivalent(BooleanFunction): \"\"\" Equivalence relation. Equivalent(A, B) is True", "True) true >>> Nand(True, True) false >>> Nand(x, y) ~(x", "truthtable = [] for t in product([0, 1], repeat=len(variables)): t", "argset: argset.remove(True) return Not(Xor(*argset)) else: obj._args = tuple(ordered(argset)) obj._argset =", "\"\"\" if isinstance(expr, Not): return not isinstance(expr.args[0], BooleanFunction) else: return", "NotImplementedError('handling of relationals') return self.atoms() == other.atoms() and \\ not", "yes, returns index, else returns -1. \"\"\" index = -1", "+ 1] = None newterm = ti[:] newterm[index] = 3", "Implies(BooleanFunction): \"\"\" Logical implication. A implies B is equivalent to", "not in (3, minterm[i]): return False return True def _rem_redundancy(l1,", "(a | ~a), True) a | b \"\"\" expr =", "for Diofant. 
\"\"\" from collections import defaultdict from itertools import", "``~True`` is the same as ``~1`` which is ``-2``, which", "d = {fi: [0] * 5 for fi in f}", "args: if Not(arg) in args: return False stack.extend(expr.args) elif not", "+= 1 else: o = len(a.args) + sum(isinstance(ai, Not) for", "Nand(x, y) ~(x & y) \"\"\" @classmethod def eval(cls, *args):", ">>, <<, and Equivalent into &, |, and ~. That", "disjunctive normal form. Examples ======== >>> is_dnf(a | b |", "represent the same logical behaviour for some correspondence between the", "Returns True if all of the arguments are logically equivalent.", "to_nnf(self, simplify=True): args = [] for a, b in zip(self.args,", "form) that gives the desired outcome. If there are inputs", "and ``False`` to be integers, ``True >> True`` will be", "for ~ operator.\"\"\" return Not(self) def __rshift__(self, other): \"\"\"Overloading for", "{y: a, z: b}) The results are not necessarily unique,", "of sums\" or \"POS\" form) that gives the desired outcome.", "i, (r, c, nc) in enumerate(rel): for j in range(i", "form. Examples ======== >>> to_cnf(~(a | b) | c) (c", "evaluated to its simplest DNF form. Examples ======== >>> to_dnf(b", "| b) & (~a | ~b), {a: a, b: b,", "Not function (negation). Returns True if the statement is False.", "False) True >>> is_nnf(Not(a & b) | c) False >>>", "in minterms is also in dontcares') old = None new", "len(function2.args): return if function1.is_Symbol: return {function1: function2} # get the", "c)) False \"\"\" return _is_form(expr, Or, And) def _is_form(expr, function1,", "use in Python, which is bitwise not. 
In particular, ``~a``", "| b | c) True >>> is_dnf(a & b &", "There is liable to be some confusion as to when", "& c) | (c & ~a) \"\"\" return _distribute((expr, Or,", "1}) y \"\"\" zero = false identity = true nargs", "(a | ~b) & (a | ~c) \"\"\" return _distribute((expr,", "if an even number of the arguments are True and", "true >>> False >> False 0 >>> Or(True, False) true", "def is_cnf(expr): \"\"\" Test whether or not an expression is", "a symbolic operation in Diofant, since it always returns ``True``", "0, 1, 1], [1, 1, 1, 1]] >>> dontcares =", "to do the comparison, and it will work in either", "False >>> is_nnf((a | ~a) & (b | c), False)", "Or)) def distribute_or_over_and(expr): \"\"\" Given a sentence s consisting of", "form is returned; if None, the answer is returned according", "{} for k in f1: if k not in f2", "*args): return Not(And(*args)) class Nor(BooleanFunction): \"\"\" Logical NOR function. It", "The primary advantage of using false instead of False is", "return simplify_logic(self) def to_nnf(self, simplify=True): return self._to_nnf(*self.args, simplify=simplify) @classmethod def", "is valid for simplified boolean expressions. \"\"\" # do some", "of thumb, the assumptions system uses a three-valued logic (``True``,", "0, 1], [0, 0, 1, 1], ... [0, 1, 1,", "= rel[j][:2] if cj == nc: odd = ~odd break", "if isinstance(expr, Not): if not expr.args[0].is_Atom: return False if not", "Used for recognizing prime implicants. \"\"\" for i, x in", "1: return true if True in argset: argset.discard(True) return And(*argset)", "and the rest are False. Returns False if an even", "argset if r.is_Relational] odd = False # is number of", "| ~b) & (a | ~c) \"\"\" return _distribute((expr, And,", "B.is_Relational: if A.canonical == B.canonical: return true elif (~A).canonical ==", "return c elif b == c: return b elif b", "algebra module for Diofant. 
\"\"\" from collections import defaultdict from", "A == true or A == false or B ==", "(x), negated symbols (Not(x)), or Or (or an And) whose", "distribute_or_over_and(expr) def is_nnf(expr, simplified=True): \"\"\" Checks if expr is in", "= [list(i) for i in minterms] dontcares = [list(i) for", "BooleanFunction(Application, Boolean): \"\"\"Boolean function is a function that lives in", "returned according to the form with fewest args (in CNF", "here is different from its normal use in Python, which", ">>> Or(True, False) true See Also ======== BooleanTrue \"\"\" def", "arguments. \"\"\" essential = [] for x in terms: temporary", "arguments; A and B. Returns False if A is True", "different if ``a`` is an integer. Furthermore, since bools in", "args = [] for i in range(0, len(self.args)+1, 2): for", "\"\"\" Converts a term in the expansion of a function", "is not a BooleanFunction itself. \"\"\" if not isinstance(expr, BooleanFunction):", "there is any doubt over whether a function or expression", "that shorthand boolean operations like ~ and >> will work", "just use ``True`` and it will automatically be converted to", "returned; if None, the answer is returned according to the", "expr.func in (And, Or): if simplified: args = expr.args for", "| b) == frozenset([a, b]) True >>> disjuncts(a & b)", "if isinstance(info[0], info[2]): for arg in info[0].args: if isinstance(arg, info[1]):", "ordered class Boolean(Expr): \"\"\"A boolean object is an object for", "else s for s in args] result.append(Or(*clause)) return And._to_nnf(*result, simplify=simplify)", "~y >>> sympify(b) (z & ~x & ~y) | (~x", "the \"product of sums\" or \"POS\" form) that gives the", "here as anything within a BooleanFunction that is not a", "eq.subs({c: x})) (c & d & (a | b) &", "truth. 
For example, if the object ends up in the", "append_symbol(arg, symbols): if isinstance(arg, Not): return -symbols[arg.args[0]] else: return symbols[arg]", "~a) & (c | ~b) >>> to_cnf((a | b) &", "b = args return And._to_nnf(a, ~b, simplify=simplify) if func ==", "a in argset else argset.add(a) elif arg in argset: argset.remove(arg)", "~b) \"\"\" return to_nnf(expr) def is_literal(expr): \"\"\" Returns True if", "from .inference import satisfiable other = sympify(other) if self.has(Relational) or", "Not(arg) in args: return False stack.extend(expr.args) elif not is_literal(expr): return", "new = minterms + dontcares while new != old: old", "can be ignored, pass them as a list, too. The", "For example, And(x, Not(y), Or(w, Not(z))). Basic.match is not robust", "an And or Or, sum of the number of arguments", "*args, **kwargs): argset = set() obj = super().__new__(cls, *args, **kwargs)", "= __lshift__ __rlshift__ = __rshift__ def __xor__(self, other): return Xor(self,", "not. \"\"\" f = eq.free_symbols d = {fi: [0] *", "get the fingerprint dictionaries f1 = _finger(function1) f2 = _finger(function2)", "return false minterms = [list(i) for i in minterms] dontcares", "and B is False. Returns True otherwise. Examples ======== >>>", "boolean expressions if possible. By \"simplified\" we mean that a", "dontcares') old = None new = minterms + dontcares while", ">>> POSform([t, x, y, z], minterms, dontcares) z & (y", "any expression, then it must necessarily be ``true`` instead of", "other): \"\"\"Overloading for & operator.\"\"\" return And(self, other) __rand__ =", "argset.remove(arg) else: argset.add(arg) rel = [(r, r.canonical, (~r).canonical) for r", "return False elif not isinstance(cls, function2): return False for lit", "if any of them are True, and False if they", "def POSform(variables, minterms, dontcares=None): \"\"\" The POSform function uses simplified_pairs", "for arg in args: if Not(arg) in args: return False", "Returns True otherwise. 
Examples ======== >>> Implies(True, False) false >>>", "distribute_and_over_or(expr): \"\"\" Given a sentence s consisting of conjunctions and", "| ~B | ...) & (B | C | ...)", "and False if they are all False. Examples ======== >>>", ">>> ~True -2 >>> ~true false \"\"\" is_Not = True", "to ``True`` or ``False`` using ``==``. * Yes: ``if greeting:``", "= new new = _simplified_pairs(old) essential = _rem_redundancy(new, minterms) return", "use in Python, which is bitwise xor. In particular, ``a", "return True # Special case of a single negation if", "= arg.args else: arg = arg, for a in arg:", "b)) False \"\"\" if isinstance(expr, Not): return not isinstance(expr.args[0], BooleanFunction)", "B. Returns False if A is True and B is", "= [[0, 0, 0, 0], [0, 0, 1, 0], [0,", "Nor(x, y) ~(x | y) \"\"\" @classmethod def eval(cls, *args):", "for arg in info[0].args: if isinstance(arg, info[1]): conj = arg", "continue remove.append((r, rj)) if odd: argset.remove(true) if true in argset", "s. Examples ======== >>> disjuncts(a | b) == frozenset([a, b])", "Not(y), Or(w, Not(z))). Basic.match is not robust enough (see issue", "[] for i, m in enumerate(minterm): if m == 0:", "for arg in args] argset = set(args) for x in", "odd number of the arguments are True and the rest", "index def _convert_to_varsSOP(minterm, variables): \"\"\" Converts a term in the", "essential = [] for x in terms: temporary = []", "argset = set() for arg in args: if not is_literal(arg):", "| b \"\"\" expr = sympify(expr) if not isinstance(expr, BooleanFunction):", "enumerate(term): if x not in (3, minterm[i]): return False return", "in argset]) _args = frozenset(argset) obj = super().__new__(cls, _args) obj._argset", "will return this class when they evaluate to false. Notes", "be accessed via ``false``. 
This is the Diofant version of", ">>> ITE(True, x, y) x >>> ITE(False, x, y) y", "x in (0, 1): newargs.append(True if x else False) else:", "as ``~1`` which is ``-2``, which has a boolean value", "and ``false``. >>> ~True -2 >>> ~true false \"\"\" is_Not", "from ..utilities import ordered class Boolean(Expr): \"\"\"A boolean object is", "or expression will return ``true`` or ``True``, just use ``==``", "two Boolean arguments; A and B. Returns False if A", "``True``. \"``true == True is True``.\" While \"``true is True``\"", "======== >>> And(x < 2, x > -2).as_set() (-2, 2)", "if Not(arg) in args: return False stack.extend(expr.args) elif not is_literal(expr):", "if index == -1: index = x else: return -1", "are integers. In particular, since Python considers ``True`` and ``False``", "is in Negation Normal Form (NNF) if it contains only", "real sets. Examples ======== >>> Or(x > 2, x <", "clause = [~s if s in neg else s for", "true else it returns the result of C. Examples ========", "bool_map(eq, eq.subs({c: x})) (c & d & (a | b)", "``~a`` and ``Not(a)`` will be different if ``a`` is an", "expressions') class Or(LatticeOp, BooleanFunction): \"\"\" Logical OR function It evaluates", "isinstance(arg, cls): arg = arg.args else: arg = arg, for", "or Or, # of times it appeared as a Not(Symbol)", "convert the list of all input combinations that generate '1'", "\"\"\" Reduces a set of minterms, if possible, to a", "sympify(expr) # Special case of an Atom if expr.is_Atom: return", "int(bool(self)) class BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of True, a singleton", "function (i.e., the \"product of sums\" or \"POS\" form) that", "reversed(list(args)): if isinstance(x, Number) or x in (0, 1): newargs.append(True", "Not(And(And(True, x), Or(x, False))) ~x >>> ~x ~x >>> Not(And(Or(x,", "will be different if ``a`` and ``b`` are integers. 
>>>", "' expressions') def to_nnf(self, simplify=True): if is_literal(self): return self expr", "1``, i.e., 0, which has a truth value of False.", "& (~x | ~y)) Notes ===== The ``~`` operator is", "for a, b in remove: argset.remove(a) argset.remove(b) if len(argset) ==", "~. That is, return an expression that is equivalent to", "simplify=simplify) @classmethod def _to_nnf(cls, *args, **kwargs): simplify = kwargs.get('simplify', True)", "note that its use here is different from its normal", "& z) >>> simplify_logic(b) ~x & ~y >>> sympify(b) (z", "elif a.is_Not: d[a.args[0]][1] += 1 else: o = len(a.args) +", "(a | ~c) \"\"\" return _distribute((expr, And, Or)) def distribute_or_over_and(expr):", "up in the ``.args`` of any expression, then it must", "if isinstance(arg, Unequality): return Equality(*arg.args) if isinstance(arg, StrictLessThan): return GreaterThan(*arg.args)", "false class BooleanFunction(Application, Boolean): \"\"\"Boolean function is a function that", "and eliminate redundant pairs, and return the essential arguments. \"\"\"", "And object in Diofant. Parameters ========== expr : string or", "to if is_dnf(expr): return expr expr = eliminate_implications(expr) return distribute_or_over_and(expr)", "0, 2): [x], (0, 0, 1, 0, 3): [a, b],", "Examples ======== >>> Nor(True, False) false >>> Nor(True, True) false", "y): temporary.append(y) if len(temporary) == 1: if temporary[0] not in", "(default). 
If 'cnf' or 'dnf', the simplest expression in the", "False elif not isinstance(cls, function2): return False for lit in", ">> operator.\"\"\" return Implies(self, other) def __lshift__(self, other): \"\"\"Overloading for", "======== >>> distribute_or_over_and(And(Or(Not(a), b), c)) (b & c) | (c", "And(c, d)) >>> bool_map(eq, eq.subs({c: x})) (c & d &", "return And(*[~arg for arg in argset]) _args = frozenset(argset) obj", "note that their use here is different from their normal", "rel): return [false] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, And) def as_set(self):", "False, and True if they are all True. Examples ========", "them are True, and False if they are all False.", "| (~b & c), True) a | c \"\"\" expr", "class Nor(BooleanFunction): \"\"\" Logical NOR function. It evaluates its arguments", "the boolean is being used as a symbolic representation of", "======== >>> x & y x & y Notes =====", "or \\ (form is None and len(truthtable) >= (2 **", "are either symbols (x), negated symbols (Not(x)), or Or (or", "expr.is_Atom: return True # Special case of a single expression", "set(args) for x in args: if isinstance(x, Number) or x", "x): return self.func(self.args[0], *[a.diff(x) for a in self.args[1:]]) # end", "b, simplify=simplify) class Equivalent(BooleanFunction): \"\"\" Equivalence relation. Equivalent(A, B) is", "and ``a | b`` will return different things if ``a``", "to s, but has only &, |, and ~ as", "two expressions bool1 and bool2 represent the same logical behaviour", "is_literal(a) True >>> is_literal(~a) True >>> is_literal(a + b) True", "if possible matchdict = {} for k in f1: if", "minterms, if possible, to a simplified set of minterms with", "_convert_to_varsPOS(maxterm, variables): \"\"\" Converts a term in the expansion of", "operations make sense.\"\"\" def __and__(self, other): \"\"\"Overloading for & operator.\"\"\"", "for the most part, you can just use ``True`` and", "Converts expr to Negation Normal Form. 
A logical expression is", "Returns True if any of the arguments are False. Returns", "disjuncts(expr): \"\"\"Return a list of the disjuncts in the sentence", "False if an even number of the arguments are True", "of real sets. Examples ======== >>> And(x < 2, x", "eliminate_implications(Implies(a, b)) b | ~a >>> eliminate_implications(Equivalent(a, b)) (a |", "z)`` could be ``(a, d)`` or ``(d, a)``: >>> eq1", "combinations(args, i): clause = [~s if s in neg else", "d[a][0] += 1 elif a.is_Not: d[a.args[0]][1] += 1 else: o", "l1: # pragma: no branch if _compare_term(x, z): assert z", "some quick checks if function1.__class__ != function2.__class__: return if len(function1.args)", "in Python subclass from ``int``, ``~True`` is the same as", "_compare_term(x, y): temporary.append(y) if len(temporary) == 1: if temporary[0] not", "``True``\". In other words, use ``true`` only on those contexts", "True def is_cnf(expr): \"\"\" Test whether or not an expression", "== true and c == false: return a elif b", "structural equality rather than mathematical, so it should return ``True``.", "info[1](*list(map(_distribute, ((info[2](c, rest), info[1], info[2]) for c in conj.args)))) elif", "in enumerate(maxterm): if m == 1: temp.append(Not(variables[i])) elif m ==", "b) & (~c | ~d) >>> to_nnf(Equivalent(a >> b, b", "True otherwise. Examples ======== >>> Implies(True, False) false >>> Implies(False,", "and disjunctions of literals, return an equivalent sentence in CNF.", "ValueError(f'{d} in minterms is also in dontcares') old = None", "list of all input combos that generate '1' (the minterms)", "instead of False is that shorthand boolean operations like ~", "the input. Examples ======== >>> b = (~x & ~y", "True def _rem_redundancy(l1, terms): \"\"\" After the truth table has", "exist, one of them is returned. 
For example, And(x, y)", "_compare_term(x, z): assert z not in essential essential.append(z) break return", "simplify_logic(self) def to_nnf(self, simplify=True): return self._to_nnf(*self.args, simplify=simplify) @classmethod def _to_nnf(cls,", "can be accessed via ``true``. This is the Diofant version", "newterm not in simplified_terms: simplified_terms.append(newterm) simplified_terms.extend( [terms[i] for i in", "set() obj = super().__new__(cls, *args, **kwargs) for arg in super(Xor,", "``x > 1``, use ``true``. Otherwise, use ``True``\". In other", "d[ai][-1] += o else: d[ai.args[0]][3] += 1 d[ai.args[0]][-1] += o", "= [] rel = [] for x in reversed(list(args)): if", "use the Diofant boolean types ``true`` and ``false``. >>> ~True", "if x else False) continue if x.is_Relational: c = x.canonical", "c) | (c & ~a) \"\"\" return _distribute((expr, Or, And))", "obj @property # type: ignore[misc] @cacheit def args(self): return tuple(ordered(self._argset))", "arg in args], simplify=simplify) if func == Or: return And._to_nnf(*[~arg", "This is the Diofant version of True, for use in", "to_nnf(expr) def is_literal(expr): \"\"\" Returns True if expr is a", "self.func(self.args[0], *[a.diff(x) for a in self.args[1:]]) # end class definitions.", "(b & c) | (~b & c), True) a |", "a, b, c = args return And._to_nnf(Or(a, ~c), Or(~a, ~b),", "for x in terms: temporary = [] for y in", "0, 1]] >>> POSform([t, x, y, z], minterms, dontcares) z", "z), And(x, y)) >>> eq2 = Or(And(Not(c), a), And(Not(c), d),", "Equivalent: return And._to_nnf(Or(*args), Or(*[~arg for arg in args]), simplify=simplify) if", "if expr.xreplace(dict(zip(variables, t))): truthtable.append(t) if deep: from ..simplify import simplify", "return False stack.extend(expr.args) elif not is_literal(expr): return False return True", "for simplified boolean expressions. 
\"\"\" # do some quick checks", "1, 0], [0, 1, 0, 1]] >>> POSform([t, x, y,", "Returns False if an even number of the arguments are", ">>> eliminate_implications(Equivalent(a, b)) (a | ~b) & (b | ~a)", "True >>> is_literal(~a) True >>> is_literal(a + b) True >>>", "And(b, c)) >>> bool_map(eq1, eq2) ((x & y) | (t", "argument. Return a logical And function (i.e., the \"product of", "| b`` will return different things if ``a`` and ``b``", "Boolean(Expr): \"\"\"A boolean object is an object for which logic", "fingerprints, but a and b do not. \"\"\" f =", "will work in either case. Finally, for boolean flags, it's", "= [] for x in terms: temporary = [] for", "b == false and c == true: return Not(a) def", "if the object ends up in the ``.args`` of any", "LessThan, StrictGreaterThan, StrictLessThan, Unequality) if isinstance(arg, Number) or arg in", "..core.numbers import Number from ..core.operations import LatticeOp from ..core.singleton import", "algorithm to convert the list of all input combinations that", "_distribute(info): \"\"\"Distributes info[1] over info[2] with respect to info[0].\"\"\" if", "\"\"\" expr = sympify(expr) if is_literal(expr): return True stack =", "y], [x, y]) [{1, 2}, {2}] \"\"\" symbols = dict(zip(symbols,", "return _distribute((expr, Or, And)) def _distribute(info): \"\"\"Distributes info[1] over info[2]", "|, and ~ as logical operators. Examples ======== >>> eliminate_implications(Implies(a,", "return expr expr = eliminate_implications(expr) return distribute_or_over_and(expr) def is_nnf(expr, simplified=True):", "if simplified: args = expr.args for arg in args: if", "things if ``a`` and ``b`` are integers. In particular, since", "return False for lit in cls.args: if isinstance(lit, Not): if", "version of False, a singleton that can be accessed via", "in Python, which is bitwise or. Hence, ``Or(a, b)`` and", "v B Accepts two Boolean arguments; A and B. 
Returns", "Examples ======== >>> to_int_repr([x | y, y], [x, y]) [{1,", "Examples ======== >>> disjuncts(a | b) == frozenset([a, b]) True", "in the ``.args`` of any expression, then it must necessarily", "-1: index = x else: return -1 return index def", "LessThan): return StrictGreaterThan(*arg.args) if isinstance(arg, GreaterThan): return StrictLessThan(*arg.args) def as_set(self):", "is bit shifts. Hence, ``Implies(a, b)`` and ``a >> b``", ">>> Not(And(a, b, c)).equals(And(Not(a), Not(b), Not(c))) False >>> Not(And(a, Not(a))).equals(Or(b,", "========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables = [sympify(v) for v in", "def __or__(self, other): \"\"\"Overloading for | operator.\"\"\" return Or(self, other)", "An important thing to remember is that ``sympify(True)`` returns ``true``.", "And(x, y) is logically equivalent to And(a, b) for the", ">>> And(x, y).subs({x: 1}) y \"\"\" zero = false identity", "True @classmethod def eval(cls, arg): from ..core import (Equality, GreaterThan,", "are True. Examples ======== >>> Nand(False, True) true >>> Nand(True,", "# Includes 0, 1 argset.discard(x) argset.add(True if x else False)", "y << x Implies(x, y) Notes ===== The ``>>`` and", "is either an And (or an Or) whose arguments are", "range(0, len(self.args)+1, 2): for neg in combinations(self.args, i): clause =", "simplest expression in the corresponding normal form is returned; if", "a function has been denested and is either an And", "& b | ~c) True >>> is_nnf((a | ~a) &", "temp = [] for i, m in enumerate(maxterm): if m", "And, Or)) def distribute_or_over_and(expr): \"\"\" Given a sentence s consisting", "1 argset.discard(x) argset.add(True if x else False) rel = []", "SOP or POS form. The return type is an Or", "ValueError(f'{d} in minterms is also in dontcares') maxterms = []", "a function that lives in a boolean space. This is", "(in CNF by default). 
deep : boolean (default True) indicates", "cj == nc: return false elif cj == c: remove.append((r,", "return True # Special case of a single expression of", "zero = true identity = false @classmethod def _new_args_filter(cls, args):", "y]) [{1, 2}, {2}] \"\"\" symbols = dict(zip(symbols, range(1, len(symbols)", "logic module. The primary advantage of using false instead of", "``sympify(True)`` returns ``true``. This means that for the most part,", "__hash__(self): return hash(False) def as_set(self): \"\"\" Rewrite logic operators and", "= lambda x: true if x else false class BooleanFunction(Application,", "of complimentary pairs odd? start 0 -> False remove =", "else: raise NotImplementedError('Sorry, Not.as_set has not yet been' ' implemented", "is_nnf((a | ~a) & (b | c), False) True >>>", "\"\"\" temp = [] for i, m in enumerate(maxterm): if", "in essential]) def POSform(variables, minterms, dontcares=None): \"\"\" The POSform function", "False. To avoid this issue, use the Diofant objects ``true``", "sets. Examples ======== >>> And(x < 2, x > -2).as_set()", "False if A is True and B is False. Returns", "in args] argset = set(args) for x in args: if", "isinstance(info[0], info[2]): for arg in info[0].args: if isinstance(arg, info[1]): conj", "& (c | ~b) \"\"\" return to_nnf(expr) def is_literal(expr): \"\"\"", "= And(Xor(a, b), c, And(c, d)) >>> bool_map(eq, eq.subs({c: x}))", "``False``, ``None``), whereas ``true`` and ``false`` represent a two-valued logic.", "expr.args for arg in args: if Not(arg) in args: return", ">>> is_literal(~a) True >>> is_literal(a + b) True >>> is_literal(Or(a,", "applied only to literals. 
If simplified is True, checks if", "Test whether or not an expression is in conjunctive normal", "!= len(f2): return # assemble the match dictionary if possible", "\"\"\" Given a sentence s consisting of conjunctions and disjunctions", "if len(argset) <= 1: return true if True in argset:", "z: b}) The results are not necessarily unique, but they", "if _compare_term(x, y): temporary.append(y) if len(temporary) == 1: if temporary[0]", "Or(LatticeOp, BooleanFunction): \"\"\" Logical OR function It evaluates its arguments", "conjuncts(a | b) == frozenset([Or(a, b)]) True \"\"\" return And.make_args(expr)", "to_cnf((a | b) & (a | ~a), True) a |", "variable form (for SOP). \"\"\" temp = [] for i,", "useful methods def conjuncts(expr): \"\"\"Return a list of the conjuncts", ">>> function2 = SOPform([a, b, c], [[1, 0, 1], [1,", "logical behaviour for some correspondence between the variables of each.", "newargs.append(True if x else False) continue if x.is_Relational: c =", "x.canonical if c in rel: continue nc = (~c).canonical if", "a | b \"\"\" expr = sympify(expr) if not isinstance(expr,", "if any(r == nc for r in rel): return [true]", ">>> sympify(True) true >>> ~true false >>> ~True -2 >>>", "if any of them are False, and False if they", "branch if _compare_term(x, z): assert z not in essential essential.append(z)", "``<<`` operators are provided as a convenience, but note that", "expr = self.args[0] func, args = expr.func, expr.args if func", "Return the simplified version of bool1, and the mapping of", "cacheit from ..core.expr import Expr from ..core.function import Application from", "True and B is False. Returns True otherwise. Examples ========", "======== >>> to_dnf(b & (a | c)) (a & b)", "(~b & c), True) a | c \"\"\" expr =", "the class directly from this # file). 
S.true = true", "f = eq.free_symbols d = {fi: [0] * 5 for", "x | y Notes ===== The ``|`` operator is provided", "y) Notes ===== The ``>>`` and ``<<`` operators are provided", "true \"\"\" def __new__(cls, *args, **options): from ..core.relational import Relational", "minterms + dontcares while new != old: old = new", "in [_ for _ in todo if _ is not", "method. \"\"\" simplified_terms = [] todo = list(range(len(terms))) for i,", "& (b | c)) False \"\"\" return _is_form(expr, Or, And)", "True, the result contains no redundant clauses. Examples ======== >>>", "= x.canonical if c in rel: continue nc = (~c).canonical", "to it's variable form (for SOP). \"\"\" temp = []", "func == Xor: result = [] for i in range(1,", "y) Notes ===== The ``^`` operator is provided as a", "Not.as_set has not yet been' ' implemented for mutivariate' '", "| b | c) True >>> is_cnf(a & b &", "0, 1, 0], [0, 1, 0, 1]] >>> SOPform([t, x,", "Not(b), Not(c))) False >>> Not(And(a, Not(a))).equals(Or(b, Not(b))) False \"\"\" from", "robust enough (see issue sympy/sympy#4835) so this is a workaround", "== Or: return And._to_nnf(*[~arg for arg in args], simplify=simplify) if", "rj, cj = rel[j][:2] if cj == nc: return false", "c = args except ValueError: raise ValueError('ITE expects exactly 3", "**kwargs): argset = set() obj = super().__new__(cls, *args, **kwargs) for", "isinstance(expr.args[0], BooleanFunction) else: return not isinstance(expr, BooleanFunction) def to_int_repr(clauses, symbols):", "& d))) (a | b) & (~c | ~d) >>>", ">>> is_dnf(a | b | c) True >>> is_dnf(a &", "in range(0, len(self.args)+1, 2): for neg in combinations(self.args, i): clause", "lit.is_Atom: return False return True def eliminate_implications(expr): \"\"\" Change >>,", "= None new = maxterms + dontcares while new !=", "...) & (B | C | ...) & ...). If", "will return different things if ``a`` and ``b`` are integers.", "x else False) else: newargs.append(x) A, B = newargs except", "expression is in conjunctive normal form. 
Examples ======== >>> is_cnf(a", "== true: return Not(a) def to_nnf(self, simplify=True): a, b, c", "is_nnf((a | ~a) & (b | c)) False >>> is_nnf((a", "the desired outcome. If there are inputs that can be", "prime implicant table method to recognize and eliminate redundant pairs,", "= arg, for a in arg: if Not(a) in argset:", "ValueError: raise ValueError('ITE expects exactly 3 arguments') if a ==", "simplified_terms def _compare_term(minterm, term): \"\"\" Return True if a binary", "~x & ~y >>> sympify(b) (z & ~x & ~y)", "{func} in expression') class Xor(BooleanFunction): \"\"\" Logical XOR (exclusive OR)", "inputs that can be ignored, pass them as a list,", "a is not conj]) return info[1](*list(map(_distribute, ((info[2](c, rest), info[1], info[2])", "True if all of the arguments are logically equivalent. Returns", "~True -2 >>> Or(True, False) true See Also ======== BooleanFalse", "corresponding normal form is returned; if None, the answer is", "B are both True or both False. Returns True if", "expressions') def to_nnf(self, simplify=True): if is_literal(self): return self expr =", "metaclass=Singleton): \"\"\"Diofant version of True, a singleton that can be", "is_dnf((a & b) | c) True >>> is_dnf(a & (b", "all arguments are False. Examples ======== >>> Nor(True, False) false", "======== >>> Equivalent(False, False, False) true >>> Equivalent(True, False, False)", "for arg in argset]) _args = frozenset(argset) obj = super().__new__(cls,", "def to_nnf(self, simplify=True): args = [] for i in range(0,", "minterm[i]): return False return True def _rem_redundancy(l1, terms): \"\"\" After", "======== >>> is_cnf(a | b | c) True >>> is_cnf(a", "ignore[misc] @cacheit def args(self): return tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args", "contexts where the boolean is being used as a symbolic", "if form == 'cnf' or form == 'dnf' or form", "(perhaps many) functions that satisfy the conditions. 
Examples ======== >>>", "not lit.is_Atom: return False return True # Special case of", ">> will work as expected on this class, whereas with", "sympify(False) false >>> false >> false true >>> False >>", "for i in range(0, len(self.args)+1, 2): for neg in combinations(self.args,", "arg in args]), simplify=simplify) if func == Xor: result =", "b == c: return b elif b == true and", "& ...) | ...). If simplify is True, the expr", "true >>> x >> y Implies(x, y) >>> y <<", "for v in variables] if form == 'dnf' or \\", "======== >>> Nor(True, False) false >>> Nor(True, True) false >>>", "or dnf only') def _finger(eq): \"\"\" Assign a 5-item fingerprint", "y, z) >>> ITE(True, x, y) x >>> ITE(False, x,", "if x.is_Relational: c = x.canonical if c in rel: continue", "Aside from not satisfying the above rule of thumb, the", "eliminating algorithm to convert the list of all input combinations", "required): {args!s}') if A == true or A == false", "terms of structural equality rather than mathematical, so it should", "return Or(*[_convert_to_varsSOP(x, variables) for x in essential]) def POSform(variables, minterms,", "the first argument. Return a logical And function (i.e., the", "{args!s}') if A == true or A == false or", "just use ``==`` instead of ``is`` to do the comparison,", "and len(truthtable) >= (2 ** (len(variables) - 1))): return SOPform(variables,", "[0, 0, 1, 1], ... [0, 1, 1, 1], [1,", "& (a | c)) (a & b) | (b &", "~a) True >>> Not(And(a, b, c)).equals(And(Not(a), Not(b), Not(c))) False >>>", "stack = [expr] while stack: expr = stack.pop() if expr.func", "or [])] for d in dontcares: if d in minterms:", "from its normal use in Python, which is bitwise xor.", "(or an And) whose arguments are only symbols or negated", "b) | (b & c) >>> to_dnf((a & b) |", "and B are both True or both False. 
Returns True", "than S.BooleanTrue and # S.BooleanFalse, but making the class and", "True) # Don't convert unless we have to if is_cnf(expr):", "in f} for a in eq.args: if a.is_Symbol: d[a][0] +=", "True def _eval_simplify(self, ratio, measure): return simplify_logic(self) def to_nnf(self, simplify=True):", "_is_form(expr, And, Or) def is_dnf(expr): \"\"\" Test whether or not", "defaultdict from itertools import combinations, product from ..core import Atom,", "b)) args.append(Or(~self.args[-1], self.args[0])) return And._to_nnf(*args, simplify=simplify) class ITE(BooleanFunction): \"\"\" If", "distribute_or_over_and(And(Or(Not(a), b), c)) (b & c) | (c & ~a)", "argset = set(args) for x in args: if isinstance(x, Number)", "essential]) def POSform(variables, minterms, dontcares=None): \"\"\" The POSform function uses", "1 instead of ``Integer(1)``. The rule of thumb is: \"If", "& (b | c), False) True >>> is_nnf(Not(a & b)", "And._to_nnf(*args, simplify=simplify) class Nand(BooleanFunction): \"\"\" Logical NAND function. It evaluates", "| y, y], [x, y]) [{1, 2}, {2}] \"\"\" symbols", "Or) whose arguments are either symbols (x), negated symbols (Not(x)),", "|, and ~. That is, return an expression that is", "UniversalSet() \"\"\" return S.UniversalSet class BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of", "Python considers ``True`` and ``False`` to be integers, ``True >>", "& ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables = [sympify(v)", "expressions') class Not(BooleanFunction): \"\"\" Logical Not function (negation). Returns True", "simplified_terms.extend( [terms[i] for i in [_ for _ in todo", "s in args] result.append(Or(*clause)) return And._to_nnf(*result, simplify=simplify) if func ==", "If yes, returns index, else returns -1. 
\"\"\" index =", "truthtable.append(t) if deep: from ..simplify import simplify variables = [simplify(v)", "boolean operations like ~ and >> will work as expected", "and S.false to work, rather than S.BooleanTrue and # S.BooleanFalse,", "table method to recognize and eliminate redundant pairs, and return", "``true`` only on those contexts where the boolean is being", "In particular, ``~a`` and ``Not(a)`` will be different if ``a``", "Also ======== BooleanFalse \"\"\" def __bool__(self): return True def __hash__(self):", "false.as_set() EmptySet() \"\"\" from ..sets import EmptySet return EmptySet() true", "canonical(self): return self def __int__(self): return int(bool(self)) class BooleanTrue(BooleanAtom, metaclass=Singleton):", "import SingletonWithManagedProperties as Singleton from ..core.sympify import converter, sympify from", "isinstance(x, Number) or x in [True, False]: # Includes 0,", "= _find_predicates(expr) truthtable = [] for t in product([0, 1],", "& y Notes ===== The ``&`` operator is provided as", "use ``==`` instead of ``is`` to do the comparison, and", "False if any argument is True. Returns True if all", "form. Examples ======== >>> is_dnf(a | b | c) True", ">>> is_nnf((a | ~a) & (b | c), False) True", "return false elif cj == c: remove.append((r, rj)) break for", "is that ``sympify(True)`` returns ``true``. This means that for the", "in CNF. 
Examples ======== >>> distribute_and_over_or(Or(a, And(Not(b), Not(c)))) (a |", "logical predicate is defined here as anything within a BooleanFunction", "use here is different from their normal use in Python,", "expr.to_nnf(simplify) def to_cnf(expr, simplify=False): \"\"\" Convert a propositional logical sentence", "+ dontcares while new != old: old = new new", "eq2) ((x & y) | (t & ~y) | (z", "Nand(False, True) true >>> Nand(True, True) false >>> Nand(x, y)", "@classmethod def eval(cls, arg): from ..core import (Equality, GreaterThan, LessThan,", "def to_nnf(self, simplify=True): return self._to_nnf(*self.args, simplify=simplify) @classmethod def _to_nnf(cls, *args,", ">>> disjuncts(a & b) == frozenset([And(a, b)]) True \"\"\" return", ">>> Not(Or(True, False)) false >>> Not(And(And(True, x), Or(x, False))) ~x", "if they are all True. Returns True if any of", "has been sufficiently simplified, use the prime implicant table method", "a sentence s consisting of conjunctions and disjunctions of literals,", "B) elif A == B: return true elif A.is_Relational and", "expr.args[0].is_Atom: return False if not isinstance(expr, function1): return False for", "return inv def bool_map(bool1, bool2): \"\"\" Return the simplified version", "function is a function that lives in a boolean space.", "b) & (a | ~a), True) a | b \"\"\"", "essential]) def _find_predicates(expr): \"\"\"Helper to find logical predicates in BooleanFunctions.", "Not(True) false >>> Not(False) true >>> Not(And(True, False)) true >>>", "argset.remove(b) argset.add(True) if len(argset) <= 1: return true if True", "if i != j: if index == -1: index =", "variable form (for POS). 
\"\"\" temp = [] for i,", "in zip(self.args, self.args[1:]): args.append(Or(~a, b)) args.append(Or(~self.args[-1], self.args[0])) return And._to_nnf(*args, simplify=simplify)", "for i, x in enumerate(f1[k]): matchdict[x] = f2[k][i] return matchdict", "{function1: function2} # get the fingerprint dictionaries f1 = _finger(function1)", "def to_int_repr(clauses, symbols): \"\"\" Takes clauses in CNF format and", "y): break else: for z in l1: # pragma: no", "break else: continue remove.append((r, rj)) if odd: argset.remove(true) if true", "so in terms of structural equality rather than mathematical, so", "And._to_nnf(Or(a, ~c), Or(~a, ~b), simplify=simplify) raise ValueError(f'Illegal operator {func} in", "a, x: b, y: c, z: d}) >>> eq =", "or Or, sum of the number of arguments with which", "' implemented for mutivariate' ' expressions') def to_nnf(self, simplify=True): if", "= ~odd break elif cj == c: break else: continue", "expr.args)) def simplify_logic(expr, form=None, deep=True): \"\"\" This function simplifies a", "will automatically be converted to ``true`` when necessary, similar to", "| ~a >>> eliminate_implications(Equivalent(a, b)) (a | ~b) & (b", "they are all False. Examples ======== >>> x | y", "======== >>> eliminate_implications(Implies(a, b)) b | ~a >>> eliminate_implications(Equivalent(a, b))", "that equates variables between two simplified boolean expressions if possible.", "false true >>> False >> False 0 >>> Or(True, False)", "sum(isinstance(ai, Not) for ai in a.args) for ai in a.args:", "giving True immediately if any of them are True, and", "elif cj == c: remove.append((r, rj)) break for a, b", "sums\" or \"POS\" form) that gives the desired outcome. If", "[0] * 5 for fi in f} for a in", "literal, else False. 
Examples ======== >>> is_literal(a) True >>> is_literal(~a)", "form.\"\"\" expr = sympify(expr) # Special case of an Atom", "class of BooleanTrue and BooleanFalse.\"\"\" is_Boolean = True @property def", "Or.as_set has not yet been' ' implemented for multivariate' '", "Not(a) def to_nnf(self, simplify=True): a, b, c = self.args return", "as a list, too. The result will be one of", "LatticeOp from ..core.singleton import S from ..core.singleton import SingletonWithManagedProperties as", "enumerate(minterm): if m == 0: temp.append(Not(variables[i])) elif m == 1:", "False they act bitwise on 0. Functions in the logic", "false \"\"\" @classmethod def eval(cls, *args): try: newargs = []", "self def __int__(self): return int(bool(self)) class BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version", "arguments with which it appeared, counting Symbol as 1 and", "return Not(Or(*args)) class Implies(BooleanFunction): \"\"\" Logical implication. A implies B", "of this sort exist, one of them is returned. For", "return argset.pop() elif True in argset: argset.remove(True) return Not(Xor(*argset)) else:", "which is bit shifts. Hence, ``Implies(a, b)`` and ``a >>", "arg in argset: argset.remove(arg) else: argset.add(arg) rel = [(r, r.canonical,", "== nc for r in rel): return [true] rel.append(c) newargs.append(x)", "is_nnf(a & b | ~c) True >>> is_nnf((a | ~a)", "given as the first argument. 
Return a logical Or function", "false elif cj == c: remove.append((r, rj)) break for a,", "symbolic operation in Diofant, since it always returns ``True`` or", "checks if len(f1) != len(f2): return # assemble the match", "b], (0, 0, 1, 2, 8): [y]} So y and", "a in info[0].args if a is not conj]) return info[1](*list(map(_distribute,", "def _convert_to_varsPOS(maxterm, variables): \"\"\" Converts a term in the expansion", "dict(_finger(eq)) {(0, 0, 1, 0, 2): [x], (0, 0, 1,", "return distribute_and_over_or(expr) def to_dnf(expr, simplify=False): \"\"\" Convert a propositional logical", "else: if not lit.is_Atom: return False return True # Special", "of real sets. Examples ======== >>> true.as_set() UniversalSet() \"\"\" return", "True, False) true >>> Xor(True, False, True, False) false >>>", "argset: return cls.zero argset.add(a) else: argset.add(arg) return cls(*argset) class And(LatticeOp,", "dict(zip(symbols, range(1, len(symbols) + 1))) def append_symbol(arg, symbols): if isinstance(arg,", "its normal use in Python, which is bitwise xor. In", "and ~ as logical operators. Examples ======== >>> eliminate_implications(Implies(a, b))", "__new__(cls, *args, **kwargs): argset = set() obj = super().__new__(cls, *args,", "And(*[_convert_to_varsPOS(x, variables) for x in essential]) def _find_predicates(expr): \"\"\"Helper to", "d: x}) \"\"\" def match(function1, function2): \"\"\"Return the mapping that", "ITE(x, y, z) >>> ITE(True, x, y) x >>> ITE(False,", "rather than S.BooleanTrue and # S.BooleanFalse, but making the class", "= __xor__ def equals(self, other, failing_expression=False): \"\"\" Returns True if", "of the number of arguments with which it appeared, counting", "is_literal(expr): return True stack = [expr] while stack: expr =", "prime implicants. \"\"\" for i, x in enumerate(term): if x", "(for SOP). 
\"\"\" temp = [] for i, m in", "y in l1: if _compare_term(x, y): temporary.append(y) if len(temporary) ==", "not isinstance(expr, BooleanFunction): return expr variables = _find_predicates(expr) truthtable =", "only on those contexts where the boolean is being used", "objects. if isinstance(arg, Equality): return Unequality(*arg.args) if isinstance(arg, Unequality): return", "= eliminate_implications(expr) return distribute_and_over_or(expr) def to_dnf(expr, simplify=False): \"\"\" Convert a", "True, True, False) true >>> Xor(True, False, True, False) false", "c) True >>> is_cnf((a & b) | c) False \"\"\"", "false false \"\"\" @classmethod def eval(cls, *args): try: newargs =", "satisfiable other = sympify(other) if self.has(Relational) or other.has(Relational): raise NotImplementedError('handling", "false >>> ~True -2 >>> Or(True, False) true See Also", "_rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x, variables) for x in essential]) def", "minterms) and (t not in dontcares): maxterms.append(t) old = None", "x >> y Implies(x, y) >>> y << x Implies(x,", "True if they are all True. Examples ======== >>> x", "be ``(a, d)`` or ``(d, a)``: >>> eq1 = Or(And(Not(y),", "boolean values to ``True`` or ``False`` using ``==``. * Yes:", "'dnf') or None (default). If 'cnf' or 'dnf', the simplest", "c: d, d: x}) \"\"\" def match(function1, function2): \"\"\"Return the", "for j_i, tj in enumerate(terms[(i + 1):]): index = _check_pair(ti,", "Implies(True, False) false >>> Implies(False, False) true >>> Implies(True, True)", "class Equivalent(BooleanFunction): \"\"\" Equivalence relation. 
Equivalent(A, B) is True iff", "break for a, b in remove: argset.remove(a) argset.remove(b) argset.add(True) if", "yet been' ' implemented for mutivariate' ' expressions') def to_nnf(self,", "[] for y in l1: if _compare_term(x, y): temporary.append(y) if", "(b | c), False) True >>> is_nnf(Not(a & b) |", "else: return symbols[arg] return [{append_symbol(arg, symbols) for arg in Or.make_args(c)}", "for an Implies ' f'(pairs are required): {args!s}') if A", "======== >>> true.as_set() UniversalSet() \"\"\" return S.UniversalSet class BooleanFalse(BooleanAtom, metaclass=Singleton):", "not isinstance(expr, BooleanFunction): return expr if simplify: return simplify_logic(expr, 'cnf',", "true if arg.is_Not: return arg.args[0] # Simplify Relational objects. if", "False, False) false >>> Equivalent(x, And(x, True)) true \"\"\" def", "c = x.canonical if c in rel: continue nc =", "function1.__class__ != function2.__class__: return if len(function1.args) != len(function2.args): return if", "ITE(Or(True, False), And(True, True), Xor(True, True)) true >>> ITE(x, y,", "evaluates its arguments in order, giving False immediately if any", "the same truth table. For two formulas to be equal", "B is False. Returns True otherwise. Examples ======== >>> Implies(True,", ">>> is_cnf(a & b & c) True >>> is_cnf((a &", "if a in argset else argset.add(a) elif arg in argset:", "False, a singleton that can be accessed via ``false``. This", "args] argset = set(args) for x in args: if isinstance(x,", "B, C) evaluates and returns the result of B if", "C | ...) & ...). If simplify is True, the", "& c) >>> to_dnf((a & b) | (a & ~b)", "results are not necessarily unique, but they are canonical. Here,", "use ``True`` and it will automatically be converted to ``true``", "if (t not in minterms) and (t not in dontcares):", "Negation Normal Form. A logical expression is in Negation Normal", "expressions. 
\"\"\" # do some quick checks if function1.__class__ !=", "if len(f1) != len(f2): return # assemble the match dictionary", "avoid this issue, use the Diofant boolean types ``true`` and", "argset.add(True) if len(argset) <= 1: return true if True in", "single negation if isinstance(expr, Not): if not expr.args[0].is_Atom: return False", "# Don't convert unless we have to if is_dnf(expr): return", "of False is that shorthand boolean operations like ~ and", "eval(cls, *args): try: newargs = [] for x in args:", "Sums form. The variables must be given as the first", "1: return argset.pop() elif True in argset: argset.remove(True) return Not(Xor(*argset))", "| ~y)) Notes ===== The ``~`` operator is provided as", "in f2 or len(f1[k]) != len(f2[k]): return for i, x", "break else: for z in l1: # pragma: no branch", "= args return And._to_nnf(a, ~b, simplify=simplify) if func == Equivalent:", "return False return True def eliminate_implications(expr): \"\"\" Change >>, <<,", "args] result.append(Or(*clause)) return And._to_nnf(*result, simplify=simplify) if func == ITE: a,", "isinstance(expr, BooleanFunction): return expr if simplify: return simplify_logic(expr, 'cnf', True)", "S from ..core.singleton import SingletonWithManagedProperties as Singleton from ..core.sympify import", "if isinstance(arg, StrictLessThan): return GreaterThan(*arg.args) if isinstance(arg, StrictGreaterThan): return LessThan(*arg.args)", "= set() obj = super().__new__(cls, *args, **kwargs) for arg in", "used for an Implies ' f'(pairs are required): {args!s}') if", "false >>> ITE(Or(True, False), And(True, True), Xor(True, True)) true >>>", "form. 
The variables must be given as the first argument.", "for arg in args], simplify=simplify) if func == Implies: a,", "symbols) for arg in Or.make_args(c)} for c in clauses] def", "BooleanTrue and BooleanFalse.\"\"\" is_Boolean = True @property def canonical(self): return", "~a)) \"\"\" expr = sympify(expr) if is_nnf(expr, simplify): return expr", "\"\"\" return And.make_args(expr) def disjuncts(expr): \"\"\"Return a list of the", "from ..core.singleton import SingletonWithManagedProperties as Singleton from ..core.sympify import converter,", "SOPform([a, b, c], [[1, 0, 1], [1, 0, 0]]) >>>", "Boolean): \"\"\"Boolean function is a function that lives in a", "| ~b) \"\"\" return to_nnf(expr) def is_literal(expr): \"\"\" Returns True", "args.append(Or(*clause)) return And._to_nnf(*args, simplify=simplify) class Nand(BooleanFunction): \"\"\" Logical NAND function.", "= self.args return And._to_nnf(Or(~a, b), Or(a, c), simplify=simplify) def _eval_derivative(self,", "return False return True # Special case of a single", "And) def as_set(self): \"\"\" Rewrite logic operators and relationals in", "Boolean): \"\"\"Base class of BooleanTrue and BooleanFalse.\"\"\" is_Boolean = True", "False else: if not lit.is_Atom: return False return True def", "1 d[ai.args[0]][-1] += o inv = defaultdict(list) for k, v", "which it appeared, counting Symbol as 1 and Not(Symbol) as", "tj) if index != -1: todo[i] = todo[j_i + i", ">>> True >> False 1 >>> true >> false false", "Or function (i.e., the \"sum of products\" or \"SOP\" form)", "True, for use in the logic module. The primary advantage", "between the variables of each. If more than one mappings", "form. That is, of the form ((A | ~B |", "[{1, 2}, {2}] \"\"\" symbols = dict(zip(symbols, range(1, len(symbols) +", "else clause. 
ITE(A, B, C) evaluates and returns the result", "..core.singleton import S from ..core.singleton import SingletonWithManagedProperties as Singleton from", "cls): arg = arg.args else: arg = arg, for a", "expr return expr.to_nnf(simplify) def to_cnf(expr, simplify=False): \"\"\" Convert a propositional", "======== >>> disjuncts(a | b) == frozenset([a, b]) True >>>", "``true`` when necessary, similar to how you can generally use", "0, which has a truth value of False. To avoid", "if they are all True. Examples ======== >>> x &", "arg in self.args]) else: raise NotImplementedError('Sorry, And.as_set has not yet", "of minterms with one less variable in the terms using", "======== >>> x | y x | y Notes =====", "Logical AND function. It evaluates its arguments in order, giving", "true >>> Implies(False, True) true >>> x >> y Implies(x,", "necessarily unique, but they are canonical. Here, ``(t, z)`` could", "True >>> disjuncts(a & b) == frozenset([And(a, b)]) True \"\"\"", "in args: if not is_literal(arg): arg = arg.to_nnf(simplify) if simplify:", "Equality): return Unequality(*arg.args) if isinstance(arg, Unequality): return Equality(*arg.args) if isinstance(arg,", "======== >>> Or(x > 2, x < -2).as_set() [-oo, -2)", "(0, 1): newargs.append(True if x else False) continue if x.is_Relational:", "the required form.\"\"\" expr = sympify(expr) # Special case of", "y], [[1, 0, 1], [0, 0, 1]]) >>> function2 =", "func == Implies: a, b = args return And._to_nnf(a, ~b,", "to it's variable form (for POS). \"\"\" temp = []", "return obj @property # type: ignore[misc] @cacheit def args(self): return", "b)]) True \"\"\" return And.make_args(expr) def disjuncts(expr): \"\"\"Return a list", "giving False immediately if any of them are False, and", "# type: ignore[misc] @cacheit def args(self): return tuple(ordered(self._argset)) def to_nnf(self,", "some # major issues (like the inability to import the", "on 0. 
Functions in the logic module will return this", "y, z) ITE(x, y, z) >>> ITE(True, x, y) x", "to_nnf(self, simplify=True): a, b = self.args return Or._to_nnf(~a, b, simplify=simplify)", "= [] for x in args: if isinstance(x, Number) or", "expr = eliminate_implications(expr) return distribute_or_over_and(expr) def is_nnf(expr, simplified=True): \"\"\" Checks", "And: return Or._to_nnf(*[~arg for arg in args], simplify=simplify) if func", "true elif (~A).canonical == B.canonical: return B else: return Expr.__new__(cls,", "0, 0, 0], [0, 0, 1, 0], [0, 1, 0,", "== frozenset([Or(a, b)]) True \"\"\" return And.make_args(expr) def disjuncts(expr): \"\"\"Return", "if is_cnf(expr): return expr expr = eliminate_implications(expr) return distribute_and_over_or(expr) def", "cls in expr.args: if cls.is_Atom: continue if isinstance(cls, Not): if", "the form ((A & ~B & ...) | (B &", "differs by only one bit. If yes, returns index, else", "** (len(variables) - 1))): return SOPform(variables, truthtable) elif form ==", "unique fingerprints, but a and b do not. \"\"\" f", "raise NotImplementedError('handling of relationals') return self.atoms() == other.atoms() and \\", "i in [_ for _ in todo if _ is", "-2 >>> ~true false \"\"\" is_Not = True @classmethod def", "do not. \"\"\" f = eq.free_symbols d = {fi: [0]", "b in zip(self.args, self.args[1:]): args.append(Or(~a, b)) args.append(Or(~self.args[-1], self.args[0])) return And._to_nnf(*args,", "False) false >>> Nor(True, True) false >>> Nor(False, True) false", "return self.func(self.args[0], *[a.diff(x) for a in self.args[1:]]) # end class", "a boolean space. This is used as base class for", "j: if index == -1: index = x else: return", "function (negation). Returns True if the statement is False. Returns", "of Products form. The variables must be given as the", "and x have unique fingerprints, but a and b do", "minterms) into the smallest Sum of Products form. 
The variables", "raise ValueError('ITE expects exactly 3 arguments') if a == true:", "True) argset = set() for arg in args: if not", "Reduces a set of minterms, if possible, to a simplified", "b}) The results are not necessarily unique, but they are", "can generally use 1 instead of ``Integer(1)``. The rule of", "simplify variables = [simplify(v) for v in variables] if form", "sentence in DNF. Note that the output is NOT simplified.", "dontcares while new != old: old = new new =", "relationals in terms of real sets. Examples ======== >>> true.as_set()", "simplified. Examples ======== >>> distribute_or_over_and(And(Or(Not(a), b), c)) (b & c)", "0, 1]] >>> SOPform([t, x, y, z], minterms, dontcares) (y", "class BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of False, a singleton that", "if the statement is True. Examples ======== >>> Not(True) false", "outcome. If there are inputs that can be ignored, pass", "True >>> is_literal(a + b) True >>> is_literal(Or(a, b)) False", "for arg in Or.make_args(c)} for c in clauses] def _check_pair(minterm1,", "normal use in Python, which is bitwise not. In particular,", "appeared as a Symbol in an And or Or, #", "No: ``if greeting == True:`` * Worse: ``if greeting is", "conjunctive normal form. Examples ======== >>> is_cnf(a | b |", "Xor(x, y) Notes ===== The ``^`` operator is provided as", "(b | ~a) >>> eliminate_implications(Equivalent(a, b, c)) (a | ~c)", "minterms differs by only one bit. If yes, returns index,", "return And._to_nnf(Or(*args), Or(*[~arg for arg in args]), simplify=simplify) if func", "rel: continue nc = (~c).canonical if any(r == nc for", "form ((A & ~B & ...) | (B & C", "Expr.__new__(cls, *args) def to_nnf(self, simplify=True): a, b = self.args return", "eval(cls, *args): return Not(Or(*args)) class Implies(BooleanFunction): \"\"\" Logical implication. 
A", "True immediately if any of them are True, and False", "kwargs.get('simplify', True) argset = set() for arg in args: if", "= [] for i, (r, c, nc) in enumerate(rel): for", "The results are not necessarily unique, but they are canonical.", "False): if not arg: continue else: arg = true if", "Special case of a single expression of function2 if isinstance(expr,", "for >> operator.\"\"\" return Implies(self, other) def __lshift__(self, other): \"\"\"Overloading", "version of False, for use in the logic module. The", ">>> false >> false true >>> False >> False 0", "in args], simplify=simplify) if func == Or: return And._to_nnf(*[~arg for", "True immediately if any of them are False, and False", "y) y >>> ITE(x, y, y) y \"\"\" @classmethod def", "t))): truthtable.append(t) if deep: from ..simplify import simplify variables =", "logical Or function (i.e., the \"sum of products\" or \"SOP\"", "\\ not satisfiable(Not(Equivalent(self, other))) class BooleanAtom(Atom, Boolean): \"\"\"Base class of", "= _args return obj @property # type: ignore[misc] @cacheit def", "simplify=True): a, b, c = self.args return And._to_nnf(Or(~a, b), Or(a,", "a singleton that can be accessed via ``false``. This is", "``true``. Otherwise, use ``True``\". In other words, use ``true`` only", "an integer representation. Examples ======== >>> to_int_repr([x | y, y],", "ITE(A, B, C) evaluates and returns the result of B", "argset.add(arg) return cls(*argset) class And(LatticeOp, BooleanFunction): \"\"\" Logical AND function.", "~a) \"\"\" return _distribute((expr, Or, And)) def _distribute(info): \"\"\"Distributes info[1]", "| ~a | (b & ~a)) \"\"\" expr = sympify(expr)", "are all True. Examples ======== >>> x & y x", "types ``true`` and ``false``. 
>>> ~True -2 >>> ~true false", "index = x else: return -1 return index def _convert_to_varsSOP(minterm,", "arg = true if isinstance(arg, Xor): for a in arg.args:", "(a | b) & (~a | ~b), {a: a, b:", "Not(self) def __rshift__(self, other): \"\"\"Overloading for >> operator.\"\"\" return Implies(self,", "false. Notes ===== See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples ======== >>>", "c = args return And._to_nnf(Or(a, ~c), Or(~a, ~b), simplify=simplify) raise", "two formulas to be equal they must have the same", "A logical expression is in Negation Normal Form (NNF) if", "input combinations that generate '1' (the minterms) into the smallest", "``~`` operator is provided as a convenience, but note that", "any of them are True, and True if they are", "If no such mapping exists, return False. Examples ======== >>>", "group- eliminating algorithm to convert the list of all input", "{(0, 0, 1, 0, 2): [x], (0, 0, 1, 0,", "term in the expansion of a function from binary to", "True) false >>> Nand(x, y) ~(x & y) \"\"\" @classmethod", "(NNF) if it contains only And, Or and Not, and", "a, b, c = self.args return And._to_nnf(Or(~a, b), Or(a, c),", "various contexts throughout Diofant. An important thing to remember is", "or A == false or B == true or B", "nc: odd = ~odd break elif cj == c: break", "other) __rand__ = __and__ def __or__(self, other): \"\"\"Overloading for |", "to_nnf(self, simplify=True): a, b, c = self.args return And._to_nnf(Or(~a, b),", "range(i + 1, len(rel)): rj, cj = rel[j][:2] if cj", "-symbols[arg.args[0]] else: return symbols[arg] return [{append_symbol(arg, symbols) for arg in", "or ``True``, just use ``==`` instead of ``is`` to do", "in terms of real sets. 
Examples ======== >>> Not(x >", "consisting of conjunctions and disjunctions of literals, return an equivalent", "== 1: temp.append(variables[i]) return And(*temp) def _convert_to_varsPOS(maxterm, variables): \"\"\" Converts", "m = match(a, b) if m: return a, m return", "True``\" is ``False``, \"``true == True``\" is ``True``, so if", "\"\"\" Takes clauses in CNF format and puts them into", "index = -1 for x, (i, j) in enumerate(zip(minterm1, minterm2)):", "``True`` or ``False`` using ``==``. * Yes: ``if greeting:`` *", "it will work in either case. Finally, for boolean flags,", "be replaced by an arbitrary symbolic ``Boolean``, like ``Or(x, y)``", "the arguments are True and the rest are False. Returns", "contexts throughout Diofant. An important thing to remember is that", "expr expr = eliminate_implications(expr) return distribute_or_over_and(expr) def is_nnf(expr, simplified=True): \"\"\"", "in clauses] def _check_pair(minterm1, minterm2): \"\"\" Checks if a pair", "import Union if len(self.free_symbols) == 1: return Union(*[arg.as_set() for arg", "newargs.append(x) return LatticeOp._new_args_filter(newargs, And) def as_set(self): \"\"\" Rewrite logic operators", "in terms of real sets. Examples ======== >>> true.as_set() UniversalSet()", "the arguments are True and the rest are False. Examples", "b elif a == false: return c elif b ==", "False, False) true >>> Equivalent(True, False, False) false >>> Equivalent(x,", "Not(Or(True, False)) false >>> Not(And(And(True, x), Or(x, False))) ~x >>>", "Change >>, <<, and Equivalent into &, |, and ~.", "in rel: continue nc = (~c).canonical if any(r == nc", "m == 0: temp.append(variables[i]) return Or(*temp) def _simplified_pairs(terms): \"\"\" Reduces", "remove.append((r, rj)) if odd: argset.remove(true) if true in argset else", "Implies: a, b = args return And._to_nnf(a, ~b, simplify=simplify) if", "1], [1, 1, 1, 1]] >>> dontcares = [[0, 0,", "literals, return an equivalent sentence in DNF. 
Note that the", "is the same as ``~1`` which is ``-2``, which has", "C & ...) | ...). If simplify is True, the", "rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, And) def as_set(self): \"\"\" Rewrite logic", "======== >>> minterms = [[0, 0, 0, 1], [0, 0,", "r in rel): return [false] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, And)", "for arg in self.args]) else: raise NotImplementedError('Sorry, And.as_set has not", "equivalent sentence in DNF. Note that the output is NOT", "will be different if ``a`` is an integer. Furthermore, since", ">>> Nor(False, False) true >>> Nor(x, y) ~(x | y)", "false >>> x ^ y Xor(x, y) Notes ===== The", "y) ~(x | y) \"\"\" @classmethod def eval(cls, *args): return", "not isinstance(expr, BooleanFunction): return {expr} return set().union(*(_find_predicates(i) for i in", "False]: # Includes 0, 1 argset.discard(x) argset.add(True if x else", "``true`` and ``false``. >>> ~True -2 >>> ~true false \"\"\"", "if func == Implies: a, b = args return And._to_nnf(a,", "[0, 0, 1, 1], [0, 1, 1, 1], ... [1,", ">>> false.as_set() EmptySet() \"\"\" from ..sets import EmptySet return EmptySet()", "in order, giving True immediately if any of them are", "true >>> Implies(True, True) true >>> Implies(False, True) true >>>", "true See Also ======== BooleanFalse \"\"\" def __bool__(self): return True", "in product([0, 1], repeat=len(variables)): t = list(t) if (t not", "| c \"\"\" expr = sympify(expr) if not isinstance(expr, BooleanFunction):", "``False`` using ``==``. * Yes: ``if greeting:`` * No: ``if", "more quick checks if len(f1) != len(f2): return # assemble", "[[0, 0, 0, 1], [0, 0, 1, 1], ... [0,", "Application from ..core.numbers import Number from ..core.operations import LatticeOp from", "are True and the rest are False. 
Examples ======== >>>", "for arg in super(Xor, obj).args: if isinstance(arg, Number) or arg", "| c) False \"\"\" return _is_form(expr, And, Or) def is_dnf(expr):", "def __rshift__(self, other): \"\"\"Overloading for >> operator.\"\"\" return Implies(self, other)", "Xor(True, False, True, False) false >>> x ^ y Xor(x,", ">>> Not(And(True, False)) true >>> Not(Or(True, False)) false >>> Not(And(And(True,", "normal use in Python, which is bit shifts. Hence, ``Implies(a,", "b: b, c: d, d: x}) \"\"\" def match(function1, function2):", "not a BooleanFunction itself. \"\"\" if not isinstance(expr, BooleanFunction): return", "form. Examples ======== >>> to_dnf(b & (a | c)) (a", "= _simplified_pairs(old) essential = _rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x, variables) for", "True if a binary term is satisfied by the given", "len(f1[k]) != len(f2[k]): return for i, x in enumerate(f1[k]): matchdict[x]", "a in eq.args: if a.is_Symbol: d[a][0] += 1 elif a.is_Not:", "\"sum of products\" or \"SOP\" form) that gives the desired", "1], repeat=len(variables)): t = list(t) if (t not in minterms)", "Examples ======== >>> Equivalent(False, False, False) true >>> Equivalent(True, False,", "and instance names the same causes some # major issues", "\"\"\" After the truth table has been sufficiently simplified, use", "all of the arguments are logically equivalent. Returns False otherwise.", "an even number of the arguments are True and the", "== ITE: a, b, c = args return And._to_nnf(Or(a, ~c),", "and (t not in dontcares): maxterms.append(t) old = None new", "StrictGreaterThan): return LessThan(*arg.args) if isinstance(arg, LessThan): return StrictGreaterThan(*arg.args) if isinstance(arg,", "BooleanFunctions. A logical predicate is defined here as anything within", "in either case. Finally, for boolean flags, it's better to", "Not, etc. \"\"\" is_Boolean = True def _eval_simplify(self, ratio, measure):", "they act bitwise on 0. 
Functions in the logic module", "defined here as anything within a BooleanFunction that is not", "return False def __hash__(self): return hash(False) def as_set(self): \"\"\" Rewrite", "~y))) ~((x | y) & (~x | ~y)) Notes =====", "equivalent to And(a, b) for the mapping {x: a, y:b}", "Implies(self, other) def __lshift__(self, other): \"\"\"Overloading for << operator.\"\"\" return", "``false``. This is the Diofant version of False, for use", "sentence s. Examples ======== >>> disjuncts(a | b) == frozenset([a,", "its simplest DNF form. Examples ======== >>> to_dnf(b & (a", "See Also ======== BooleanFalse \"\"\" def __bool__(self): return True def", "be different if ``a`` and ``b`` are integers. >>> Xor(x,", "= __and__ def __or__(self, other): \"\"\"Overloading for | operator.\"\"\" return", "the same logical behaviour for some correspondence between the variables", "= SOPform([x, z, y], [[1, 0, 1], [0, 0, 1]])", "else s for s in self.args] args.append(Or(*clause)) return And._to_nnf(*args, simplify=simplify)", "] >>> eq = Or(And(Not(y), a), And(Not(y), b), And(x, y))", "convert the list of all input combos that generate '1'", "1, 1, 1], [1, 0, 1, 1], [1, 1, 1,", "be used in various contexts throughout Diofant. An important thing", "and the rest are False. Examples ======== >>> Xor(True, False)", "input combos that generate '1' (the minterms) into the smallest", "the simplest expression in the corresponding normal form is returned;", "Don't compare boolean values to ``True`` or ``False`` using ``==``.", "This function simplifies a boolean function to its simplified version", "And, Or, Not, etc. \"\"\" is_Boolean = True def _eval_simplify(self,", "@property def canonical(self): return self def __int__(self): return int(bool(self)) class", "between two simplified boolean expressions if possible. 
By \"simplified\" we", "the boolean in question can be replaced by an arbitrary", "return hash(False) def as_set(self): \"\"\" Rewrite logic operators and relationals", "logic operations make sense.\"\"\" def __and__(self, other): \"\"\"Overloading for &", "sympy/sympy#4835) so this is a workaround that is valid for", "Returns True if the statement is False. Returns False if", "of True, for use in the logic module. The primary", "(len(variables) - 1))): return SOPform(variables, truthtable) elif form == 'cnf'", "``True`` and ``False`` to be integers, ``True >> True`` will", "return Implies(other, self) __rrshift__ = __lshift__ __rlshift__ = __rshift__ def", "+ i + 1] = None newterm = ti[:] newterm[index]", "info[0].args if a is not conj]) return info[1](*list(map(_distribute, ((info[2](c, rest),", "is logically equivalent to And(a, b) for the mapping {x:", "enumerate(maxterm): if m == 1: temp.append(Not(variables[i])) elif m == 0:", "combinations that generate '1' (the minterms) into the smallest Product", "~y & ~z) | (~x & ~y & z) >>>", "1], ... [1, 0, 1, 1], [1, 1, 1, 1]]", "else False) rel = [] for r in argset: if", "not isinstance(expr, BooleanFunction) def to_int_repr(clauses, symbols): \"\"\" Takes clauses in", "\"\"\" @classmethod def eval(cls, *args): return Not(Or(*args)) class Implies(BooleanFunction): \"\"\"", "assert z not in essential essential.append(z) break return essential def", "accessed via ``true``. This is the Diofant version of True,", "-> False remove = [] for i, (r, c, nc)", "*args): try: newargs = [] for x in args: if", ">>> Nor(False, True) false >>> Nor(False, False) true >>> Nor(x,", "elements of ``.args`` must be ``Basic``. On the other hand,", "true. 
Notes ===== There is liable to be some confusion", "False) true See Also ======== BooleanFalse \"\"\" def __bool__(self): return", ">>> simplify_logic(b) ~x & ~y >>> sympify(b) (z & ~x", "not in f2 or len(f1[k]) != len(f2[k]): return for i,", "args = expr.func, expr.args if func == And: return Or._to_nnf(*[~arg", "_find_predicates(expr): \"\"\"Helper to find logical predicates in BooleanFunctions. A logical", "in essential: if _compare_term(x, y): break else: for z in", "_finger(function1) f2 = _finger(function2) # more quick checks if len(f1)", ">>> Nor(True, False) false >>> Nor(True, True) false >>> Nor(False,", "None new = minterms + dontcares while new != old:", "False else: if not lit.is_Atom: return False return True #", "elif (~A).canonical == B.canonical: return B else: return Expr.__new__(cls, *args)", "work as expected on this class, whereas with False they", "return expr return expr.to_nnf(simplify) def to_cnf(expr, simplify=False): \"\"\" Convert a", "& (b | ~a) >>> eliminate_implications(Equivalent(a, b, c)) (a |", "not isinstance(cls, function2): return False for lit in cls.args: if", "argset.discard(True) return And(*argset) if False in argset: argset.discard(False) return And(*[~arg", "BooleanFalse() # We want S.true and S.false to work, rather", "= (~x & ~y & ~z) | (~x & ~y", "clauses in CNF format and puts them into an integer", "Or.make_args(c)} for c in clauses] def _check_pair(minterm1, minterm2): \"\"\" Checks", "in argset if r.is_Relational] odd = False # is number", "| operator.\"\"\" return Or(self, other) __ror__ = __or__ def __invert__(self):", "t in product([0, 1], repeat=len(variables)): t = list(t) if expr.xreplace(dict(zip(variables,", "False))) ~x >>> ~x ~x >>> Not(And(Or(x, y), Or(~x, ~y)))", "s in self.args] args.append(Or(*clause)) return And._to_nnf(*args, simplify=simplify) class Nand(BooleanFunction): \"\"\"", "in conjunctive normal form. 
Examples ======== >>> is_cnf(a | b", "isinstance(arg, info[1]): conj = arg break else: return info[0] rest", ">>> Or(x, y).subs({x: 0}) y \"\"\" zero = true identity", "0, 0], [0, 0, 1, 0], [0, 1, 0, 1]]", "= eq.free_symbols d = {fi: [0] * 5 for fi", "for r in rel): return [true] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs,", "if not isinstance(expr, BooleanFunction): return expr if simplify: return simplify_logic(expr,", "mapping {x: a, y:b} or {x: b, y:a}. If no", "are both True or both False. Returns True if all", "Products form. The variables must be given as the first", "~B | ...) & (B | C | ...) &", "case of a single negation if isinstance(expr, Not): if not", "has not yet been' ' implemented for multivariate' ' expressions')", "x have unique fingerprints, but a and b do not.", "if s in neg else s for s in self.args]", "d), And(b, c)) >>> bool_map(eq1, eq2) ((x & y) |", "v in variables] if minterms == []: return false minterms", "its arguments in order, giving False immediately if any of", "not an expression is in disjunctive normal form. Examples ========", "{fi: [0] * 5 for fi in f} for a", "terms): \"\"\" After the truth table has been sufficiently simplified,", "b & c) True >>> is_dnf((a & b) | c)", "set of minterms with one less variable in the terms", "product from ..core import Atom, cacheit from ..core.expr import Expr", "a set of minterms, if possible, to a simplified set", "~a), True) a | b \"\"\" expr = sympify(expr) if", "Notes ===== The ``^`` operator is provided as a convenience,", "# pragma: no branch return POSform(variables, truthtable) else: raise ValueError('form", "either symbols (x), negated symbols (Not(x)), or Or (or an", "form is None: # pragma: no branch return POSform(variables, truthtable)", "b = self.args return Or._to_nnf(~a, b, simplify=simplify) class Equivalent(BooleanFunction): \"\"\"", "they evaluate to true. 
Notes ===== There is liable to", "= x else: return -1 return index def _convert_to_varsSOP(minterm, variables):", "be different if ``a`` is an integer. Furthermore, since bools", "dontcares') maxterms = [] for t in product([0, 1], repeat=len(variables)):", "or x in [True, False]: # Includes 0, 1 argset.discard(x)", "for lit in expr.args: if isinstance(lit, Not): if not lit.args[0].is_Atom:", "\"\"\" The POSform function uses simplified_pairs and a redundant-group eliminating", "operators and relationals in terms of real sets. Examples ========", "argset else argset.add(a) elif arg in argset: argset.remove(arg) else: argset.add(arg)", "def __int__(self): return int(bool(self)) class BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of", "expression is in Negation Normal Form (NNF) if it contains", "or other.has(Relational): raise NotImplementedError('handling of relationals') return self.atoms() == other.atoms()", "from ..core import (Equality, GreaterThan, LessThan, StrictGreaterThan, StrictLessThan, Unequality) if", "return False return True def is_cnf(expr): \"\"\" Test whether or", "True in argset: argset.remove(True) return Not(Xor(*argset)) else: obj._args = tuple(ordered(argset))", "arguments are logically equivalent. Returns False otherwise. Examples ======== >>>", "the essential arguments. \"\"\" essential = [] for x in", "any(r == nc for r in rel): return [true] rel.append(c)", "None a = simplify_logic(bool1) b = simplify_logic(bool2) m = match(a,", "l1: if _compare_term(x, y): temporary.append(y) if len(temporary) == 1: if", "0, 1, 1], ... [0, 1, 1, 1], [1, 0,", "old = new new = _simplified_pairs(old) essential = _rem_redundancy(new, minterms)", "normal form. 
Examples ======== >>> is_cnf(a | b | c)", "args = [] for a, b in zip(self.args, self.args[1:]): args.append(Or(~a,", "1: return self.args[0].as_set().complement(S.Reals) else: raise NotImplementedError('Sorry, Not.as_set has not yet", "function1, function2): \"\"\"Test whether or not an expression is of", "x Implies(x, y) Notes ===== The ``>>`` and ``<<`` operators", "are True and the rest are False. Returns False if", "if isinstance(arg, GreaterThan): return StrictLessThan(*arg.args) def as_set(self): \"\"\" Rewrite logic", "relationals in terms of real sets. Examples ======== >>> Or(x", "in dontcares): maxterms.append(t) old = None new = maxterms +", "arg = arg.args else: arg = arg, for a in", "> -2).as_set() (-2, 2) \"\"\" from ..sets import Intersection if", "disjuncts(a | b) == frozenset([a, b]) True >>> disjuncts(a &", "return b elif b == true and c == false:", "instead of ``if x is True``. To quote PEP 8:", "0], [0, 1, 0, 1]] >>> SOPform([t, x, y, z],", "= sympify(expr) # Special case of an Atom if expr.is_Atom:", "enumerate(zip(minterm1, minterm2)): if i != j: if index == -1:", "0], [0, 1, 0, 1]] >>> POSform([t, x, y, z],", "have to if is_dnf(expr): return expr expr = eliminate_implications(expr) return", "= frozenset(argset) obj = super().__new__(cls, _args) obj._argset = _args return", "fingerprint to each symbol in the equation: [ # of", "new new = _simplified_pairs(old) essential = _rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x,", "or x in (0, 1): newargs.append(True if x else False)", "in super(Xor, obj).args: if isinstance(arg, Number) or arg in (True,", "@classmethod def _to_nnf(cls, *args, **kwargs): simplify = kwargs.get('simplify', True) argset", "for a, b in remove: argset.remove(a) argset.remove(b) argset.add(True) if len(argset)", "| (b & ~a)) \"\"\" expr = sympify(expr) if is_nnf(expr,", "only') def _finger(eq): \"\"\" Assign a 5-item fingerprint to each", "expression form : string ('cnf' or 'dnf') or None (default).", 
"True) false >>> Xor(True, False, True, True, False) true >>>", "Logical Not function (negation). Returns True if the statement is", "return an expression that is equivalent to s, but has", "a function or expression will return ``true`` or ``True``, just", "BooleanFalse \"\"\" def __bool__(self): return True def __hash__(self): return hash(True)", "Examples ======== >>> true.as_set() UniversalSet() \"\"\" return S.UniversalSet class BooleanFalse(BooleanAtom,", "y)) >>> dict(_finger(eq)) {(0, 0, 1, 0, 2): [x], (0,", "* Worse: ``if greeting is True:`` Examples ======== >>> sympify(True)", "b, c = args except ValueError: raise ValueError('ITE expects exactly", "args except ValueError: raise ValueError('ITE expects exactly 3 arguments') if", "simplified_pairs and a redundant-group eliminating algorithm to convert the list", "true identity = false @classmethod def _new_args_filter(cls, args): newargs =", "todo = list(range(len(terms))) for i, ti in enumerate(terms[:-1]): for j_i,", "want S.true and S.false to work, rather than S.BooleanTrue and", "z) | (z & ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\"", "distribute_and_over_or(Or(a, And(Not(b), Not(c)))) (a | ~b) & (a | ~c)", "return Intersection(*[arg.as_set() for arg in self.args]) else: raise NotImplementedError('Sorry, And.as_set", "False. Examples ======== >>> x | y x | y", "isinstance(cls, function2): return False for lit in cls.args: if isinstance(lit,", "======== >>> (a >> b).equals(~b >> ~a) True >>> Not(And(a,", "variables of each. If more than one mappings of this", "& ~y), {t: a, x: b, y: c, z: d})", "a, b, c = args except ValueError: raise ValueError('ITE expects", "b & c) True >>> is_cnf((a & b) | c)", "newargs.append(x) return LatticeOp._new_args_filter(newargs, Or) def as_set(self): \"\"\" Rewrite logic operators", "_compare_term(x, y): break else: for z in l1: # pragma:", "lives in a boolean space. 
This is used as base", "B.canonical: return B else: return Expr.__new__(cls, *args) def to_nnf(self, simplify=True):", "= Or(And(Not(y), t), And(Not(y), z), And(x, y)) >>> eq2 =", "# assemble the match dictionary if possible matchdict = {}", ">>> eq = And(Xor(a, b), c, And(c, d)) >>> bool_map(eq,", "use in the logic module. The primary advantage of using", "symbols): \"\"\" Takes clauses in CNF format and puts them", "frozenset([a, b]) True >>> conjuncts(a | b) == frozenset([Or(a, b)])", "\"\"\" Change >>, <<, and Equivalent into &, |, and", "if is_literal(expr): return True stack = [expr] while stack: expr", "Return a logical Or function (i.e., the \"sum of products\"", "x, y, z], minterms, dontcares) (y & z) | (z", "true: return Not(a) def to_nnf(self, simplify=True): a, b, c =", "exists, return False. Examples ======== >>> function1 = SOPform([x, z,", "function simplifies a boolean function to its simplified version in", "for j in range(i + 1, len(rel)): rj, cj =", "import Number from ..core.operations import LatticeOp from ..core.singleton import S", "Not is applied only to literals. If simplified is True,", "elif cj == c: break else: continue remove.append((r, rj)) if", "an integer. Furthermore, since bools in Python subclass from ``int``,", "that satisfy the conditions. Examples ======== >>> minterms = [[0,", "x else False) rel = [] for r in argset:", "it appeared as a Not(Symbol) in an And or Or,", "arg.is_Not: return arg.args[0] # Simplify Relational objects. 
if isinstance(arg, Equality):", "and ``a >> b`` will return different things if ``a``", "def __and__(self, other): \"\"\"Overloading for & operator.\"\"\" return And(self, other)", "considers ``True`` and ``False`` to be integers, ``True >> True``", "any of them are True, and False if they are", "is not a symbolic operation in Diofant, since it always", "if m == 0: temp.append(Not(variables[i])) elif m == 1: temp.append(variables[i])", "arg.args: argset.remove(a) if a in argset else argset.add(a) elif arg", "``false`` represent a two-valued logic. When in doubt, use ``True``.", "as the first argument. Return a logical And function (i.e.,", "contained within the input. Examples ======== >>> b = (~x", "info[2] with respect to info[0].\"\"\" if isinstance(info[0], info[2]): for arg", "and Not is applied only to literals. If simplify is", "~a) & (b | c), False) True >>> is_nnf(Not(a &", "Notes ===== The ``|`` operator is provided as a convenience,", "used in various contexts throughout Diofant. An important thing to", "import defaultdict from itertools import combinations, product from ..core import", "f2[k][i] return matchdict if matchdict else None a = simplify_logic(bool1)", "will return this class when they evaluate to true. Notes", "m in enumerate(maxterm): if m == 1: temp.append(Not(variables[i])) elif m", "1 else: o = len(a.args) + sum(isinstance(ai, Not) for ai", "def SOPform(variables, minterms, dontcares=None): \"\"\" The SOPform function uses simplified_pairs", "b)`` and ``a | b`` will return different things if", "``if x`` instead of ``if x is True``. 
To quote", "def disjuncts(expr): \"\"\"Return a list of the disjuncts in the", "is used as base class for And, Or, Not, etc.", "r.canonical, (~r).canonical) for r in argset if r.is_Relational] odd =", "return false if arg else true if arg.is_Not: return arg.args[0]", "object for which logic operations make sense.\"\"\" def __and__(self, other):", "``>>`` and ``<<`` operators are provided as a convenience, but", "boolean space. This is used as base class for And,", "args): newargs = [] rel = [] for x in", "= [] rel = [] for x in args: if", "'1' (the minterms) into the smallest Product of Sums form.", "is_cnf(a | b | c) True >>> is_cnf(a & b", "False. Examples ======== >>> function1 = SOPform([x, z, y], [[1,", ">>> Nor(x, y) ~(x | y) \"\"\" @classmethod def eval(cls,", "(a & ~b) | (b & c) | (~b &", "\"\"\" simplified_terms = [] todo = list(range(len(terms))) for i, ti", ">> b`` will return different things if ``a`` and ``b``", "return Or(self, other) __ror__ = __or__ def __invert__(self): \"\"\"Overloading for", "= __rshift__ def __xor__(self, other): return Xor(self, other) __rxor__ =", "2 ] >>> eq = Or(And(Not(y), a), And(Not(y), b), And(x,", "True >>> is_nnf((a | ~a) & (b | c)) False", "= len(a.args) + sum(isinstance(ai, Not) for ai in a.args) for", "True. 
Examples ======== >>> Nand(False, True) true >>> Nand(True, True)", "in terms: temporary = [] for y in l1: if", "is True iff A and B are both True or", "S.UniversalSet class BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of False, a singleton", "its use here is different from its normal use in", "[] rel = [] for x in reversed(list(args)): if isinstance(x,", "argset.add(a) else: argset.add(arg) return cls(*argset) class And(LatticeOp, BooleanFunction): \"\"\" Logical", "s in neg else s for s in self.args] args.append(Or(*clause))", "expr = eliminate_implications(expr) return distribute_and_over_or(expr) def to_dnf(expr, simplify=False): \"\"\" Convert", "for a in arg: if Not(a) in argset: return cls.zero", "== false or B == true or B == false:", "S.BooleanFalse, but making the class and instance names the same", "appeared as a Not(symbol), # of times it appeared as", "arg in (True, False): return false if arg else true", "Or, And) def _is_form(expr, function1, function2): \"\"\"Test whether or not", "for x in essential]) def _find_predicates(expr): \"\"\"Helper to find logical", "Not): if not expr.args[0].is_Atom: return False if not isinstance(expr, function1):", "The SOPform function uses simplified_pairs and a redundant group- eliminating", "newargs.append(x) A, B = newargs except ValueError: raise ValueError(f'{len(args)} operand(s)", "desired outcome. If there are inputs that can be ignored,", "in Python, which is bitwise not. In particular, ``~a`` and", "bool_map(function1, function2) (y & ~z, {y: a, z: b}) The", "else: return info[0] rest = info[2](*[a for a in info[0].args", "metaclass=Singleton): \"\"\"Diofant version of False, a singleton that can be", ">> a)) (a | ~b | (a & ~b)) &", "been denested and is either an And (or an Or)", "Python, which is bit shifts. 
Hence, ``Implies(a, b)`` and ``a", "> 0, evaluate=False).as_set() (-oo, 0] \"\"\" if len(self.free_symbols) == 1:", "each symbol in the equation: [ # of times it", "True if the given formulas have the same truth table.", "= True @classmethod def eval(cls, arg): from ..core import (Equality,", "yet been' ' implemented for multivariate' ' expressions') class Or(LatticeOp,", "if False in argset: argset.discard(False) return And(*[~arg for arg in", "expr s. Examples ======== >>> conjuncts(a & b) == frozenset([a,", "from ..sets import EmptySet return EmptySet() true = BooleanTrue() false:", "r.is_Relational] odd = False # is number of complimentary pairs", "immediately if any of them are True, and True if", "For example, And(x, y) is logically equivalent to And(a, b)", "~true false >>> ~True -2 >>> Or(True, False) true See", "if isinstance(r, Relational): rel.append((r, r.canonical, (~r).canonical)) remove = [] for", "true if isinstance(arg, Xor): for a in arg.args: argset.remove(a) if", "i, ti in enumerate(terms[:-1]): for j_i, tj in enumerate(terms[(i +", "Return True if a binary term is satisfied by the", "in arg: if Not(a) in argset: return cls.zero argset.add(a) else:", "eliminate_implications(expr): \"\"\" Change >>, <<, and Equivalent into &, |,", "y).subs({x: 1}) y \"\"\" zero = false identity = true", "from ..simplify import simplify variables = [simplify(v) for v in", "operator.\"\"\" return Implies(other, self) __rrshift__ = __lshift__ __rlshift__ = __rshift__", "collections import defaultdict from itertools import combinations, product from ..core", "expression that is equivalent to s, but has only &,", "None and len(truthtable) >= (2 ** (len(variables) - 1))): return", "literals. If simplify is True, the result contains no redundant", "\"\"\"Base class of BooleanTrue and BooleanFalse.\"\"\" is_Boolean = True @property", "return an equivalent sentence in CNF. 
Examples ======== >>> distribute_and_over_or(Or(a,", "<< operator.\"\"\" return Implies(other, self) __rrshift__ = __lshift__ __rlshift__ =", "Not(BooleanFunction): \"\"\" Logical Not function (negation). Returns True if the", "necessarily be ``true`` instead of ``True``, as elements of ``.args``", "of B if A is true else it returns the", "ITE(True, x, y) x >>> ITE(False, x, y) y >>>", "return SOPform(variables, truthtable) elif form == 'cnf' or form is", "it must necessarily be ``true`` instead of ``True``, as elements", "is_nnf(expr, simplify): return expr return expr.to_nnf(simplify) def to_cnf(expr, simplify=False): \"\"\"", "and bool2 represent the same logical behaviour for some correspondence", "if a.is_Symbol: d[a][0] += 1 elif a.is_Not: d[a.args[0]][1] += 1", "\"\"\" Return True if a binary term is satisfied by", "in terms of real sets. Examples ======== >>> And(x <", "[sympify(arg, strict=True) for arg in args] argset = set(args) for", "instance names the same causes some # major issues (like", "Implies ' f'(pairs are required): {args!s}') if A == true", "in a boolean space. This is used as base class", "which has a boolean value of True. To avoid this", "= eliminate_implications(expr) return distribute_or_over_and(expr) def is_nnf(expr, simplified=True): \"\"\" Checks if", "b) if m: return a, m return m is not", "And(LatticeOp, BooleanFunction): \"\"\" Logical AND function. It evaluates its arguments", "= false identity = true nargs = None @classmethod def", "logically equivalent to And(a, b) for the mapping {x: a,", "eq = And(Xor(a, b), c, And(c, d)) >>> bool_map(eq, eq.subs({c:", "binary term is satisfied by the given term. Used for", "less variable in the terms using QM method. 
\"\"\" simplified_terms", "like ~ and >> will work as expected on this", "for boolean flags, it's better to just use ``if x``", "= [] for i, m in enumerate(minterm): if m ==", "boolean (default True) indicates whether to recursively simplify any non-boolean", "disjunctive normal form. That is, of the form ((A &", "and True if they are all False. Returns False if", "is defined here as anything within a BooleanFunction that is", "etc. \"\"\" is_Boolean = True def _eval_simplify(self, ratio, measure): return", "in argset: if isinstance(r, Relational): rel.append((r, r.canonical, (~r).canonical)) remove =", "boolean expressions. \"\"\" # do some quick checks if function1.__class__", "it contains only And, Or and Not, and Not is", "for ai in a.args) for ai in a.args: if ai.is_Symbol:", "a list, too. The result will be one of the", "B Accepts two Boolean arguments; A and B. Returns False", "literals, return an equivalent sentence in CNF. Examples ======== >>>", "``a`` is an integer. Furthermore, since bools in Python subclass", "c)) False >>> is_nnf((a | ~a) & (b | c),", "provided as a convenience, but note that their use here", "if ``a`` and ``b`` are integers. In particular, since Python", "function or expression will return ``true`` or ``True``, just use", "return expr if simplify: return simplify_logic(expr, 'dnf', True) # Don't", "valid for simplified boolean expressions. \"\"\" # do some quick", "===== There is liable to be some confusion as to", "them are True, and True if they are all False.", ">>> Implies(True, True) true >>> Implies(False, True) true >>> x", "def __bool__(self): return True def __hash__(self): return hash(True) def as_set(self):", "(Equality, GreaterThan, LessThan, StrictGreaterThan, StrictLessThan, Unequality) if isinstance(arg, Number) or", "they act bitwise on 1. Functions in the logic module", "And.as_set has not yet been' ' implemented for multivariate' '", "each. 
If more than one mappings of this sort exist,", "it always returns ``True`` or ``False``, and does so in", "- 1))): return SOPform(variables, truthtable) elif form == 'cnf' or", "giving False immediately if any of them are True, and", "y, z], minterms, dontcares) z & (y | ~t) References", "true if x else false class BooleanFunction(Application, Boolean): \"\"\"Boolean function", "other words, use ``true`` only on those contexts where the", ">>> SOPform([t, x, y, z], minterms, dontcares) (y & z)", "if ``a`` is an integer. Furthermore, since bools in Python", "is of the required form.\"\"\" expr = sympify(expr) # Special", "old: old = new new = _simplified_pairs(old) essential = _rem_redundancy(new,", "whose arguments are only symbols or negated symbols. For example,", ">>> eq2 = Or(And(Not(c), a), And(Not(c), d), And(b, c)) >>>", "BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of True, a singleton that can", "true instead of True is that shorthand boolean operations like", "``true`` and ``false`` represent a two-valued logic. When in doubt,", "is_literal(Or(a, b)) False \"\"\" if isinstance(expr, Not): return not isinstance(expr.args[0],", "if cls.is_Atom: continue if isinstance(cls, Not): if not cls.args[0].is_Atom: return", "in terms of structural equality rather than mathematical, so it", "======== >>> function1 = SOPform([x, z, y], [[1, 0, 1],", "func == And: return Or._to_nnf(*[~arg for arg in args], simplify=simplify)", "if not isinstance(expr, BooleanFunction): return {expr} return set().union(*(_find_predicates(i) for i", "| y) & (~x | ~y)) Notes ===== The ``~``", "``False``. Aside from not satisfying the above rule of thumb,", "is equivalent to s, but has only &, |, and", "B == false: return Or(Not(A), B) elif A == B:", "return POSform(variables, truthtable) else: raise ValueError('form can be cnf or", "return And._to_nnf(Or(~a, b), Or(a, c), simplify=simplify) def _eval_derivative(self, x): return", "unique, but they are canonical. 
Here, ``(t, z)`` could be", "combos that generate '1' (the minterms) into the smallest Sum", "thing to remember is that ``sympify(True)`` returns ``true``. This means", "| ...) & ...). If simplify is True, the expr", "Symbol in an And or Or, # of times it", "& (c | ~b) >>> to_cnf((a | b) & (a", "is, of the form ((A | ~B | ...) &", ">>> ITE(True, False, True) false >>> ITE(Or(True, False), And(True, True),", "in rel): return [false] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, And) def", "..core.sympify import converter, sympify from ..utilities import ordered class Boolean(Expr):", "if any of them are True, and True if they", "A logical predicate is defined here as anything within a", "tj in enumerate(terms[(i + 1):]): index = _check_pair(ti, tj) if", "&, |, and ~. That is, return an expression that", "s for s in args] result.append(Or(*clause)) return And._to_nnf(*result, simplify=simplify) if", ">>> Equivalent(True, False, False) false >>> Equivalent(x, And(x, True)) true", "& ~x & ~y) | (~x & ~y & ~z)", "BooleanFunction): return expr variables = _find_predicates(expr) truthtable = [] for", "Rewrite logic operators and relationals in terms of real sets.", "y) y \"\"\" @classmethod def eval(cls, *args): try: a, b,", "for i, x in enumerate(term): if x not in (3,", "order, giving False immediately if any of them are False,", "if expr is in Negation Normal Form. A logical expression", "implicant table method to recognize and eliminate redundant pairs, and", "2, x > -2).as_set() (-2, 2) \"\"\" from ..sets import", "elif A == B: return true elif A.is_Relational and B.is_Relational:", "i in minterms] dontcares = [list(i) for i in (dontcares", "for r in rel): return [false] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs,", "more than one mappings of this sort exist, one of", "Not(And(*args)) class Nor(BooleanFunction): \"\"\" Logical NOR function. 
It evaluates its", "dontcares) (y & z) | (z & ~t) References ==========", "input. Examples ======== >>> b = (~x & ~y &", "= sympify(expr) if is_nnf(expr, simplify): return expr return expr.to_nnf(simplify) def", "for which logic operations make sense.\"\"\" def __and__(self, other): \"\"\"Overloading", "t = list(t) if expr.xreplace(dict(zip(variables, t))): truthtable.append(t) if deep: from", "False)) false >>> Not(And(And(True, x), Or(x, False))) ~x >>> ~x", "# is number of complimentary pairs odd? start 0 ->", "we mean that a function has been denested and is", "to_nnf(Equivalent(a >> b, b >> a)) (a | ~b |", ">>> x & y x & y Notes ===== The", "isinstance(expr, Not): if not expr.args[0].is_Atom: return False if not isinstance(expr,", "version of True, for use in the logic module. The", "b \"\"\" expr = sympify(expr) if not isinstance(expr, BooleanFunction): return", "else None a = simplify_logic(bool1) b = simplify_logic(bool2) m =", "y) >>> y << x Implies(x, y) Notes ===== The", "bool1 and bool2 represent the same logical behaviour for some", "<<, and Equivalent into &, |, and ~. That is,", "False. Examples ======== >>> is_literal(a) True >>> is_literal(~a) True >>>", "= -1 for x, (i, j) in enumerate(zip(minterm1, minterm2)): if", "Examples ======== >>> ITE(True, False, True) false >>> ITE(Or(True, False),", "((A & ~B & ...) | (B & C &", "evaluate=False).as_set() (-oo, 0] \"\"\" if len(self.free_symbols) == 1: return self.args[0].as_set().complement(S.Reals)", "or And object in Diofant. 
Parameters ========== expr : string", "\"\"\" def __bool__(self): return True def __hash__(self): return hash(True) def", "= list(t) if (t not in minterms) and (t not", "for i in minterms] dontcares = [list(i) for i in", "``1 >> 1``, i.e., 0, which has a truth value", "simplified, use the prime implicant table method to recognize and", "= simplify_logic(bool1) b = simplify_logic(bool2) m = match(a, b) if", "lit.args[0].is_Atom: return False else: if not lit.is_Atom: return False return", "for c in conj.args)))) elif isinstance(info[0], info[1]): return info[1](*list(map(_distribute, ((x,", "is True. Returns True if all arguments are False. Examples", "use ``true``. Otherwise, use ``True``\". In other words, use ``true``", "And._to_nnf(a, ~b, simplify=simplify) if func == Equivalent: return And._to_nnf(Or(*args), Or(*[~arg", "\"\"\" return _distribute((expr, Or, And)) def _distribute(info): \"\"\"Distributes info[1] over", "is True. Examples ======== >>> Not(True) false >>> Not(False) true", "def _finger(eq): \"\"\" Assign a 5-item fingerprint to each symbol", "* https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables = [sympify(v) for v in variables]", "# more quick checks if len(f1) != len(f2): return #", "v in variables] if form == 'dnf' or \\ (form", "= sympify(expr) if not isinstance(expr, BooleanFunction): return expr variables =", "part, you can just use ``True`` and it will automatically", "minterms = [list(i) for i in minterms] dontcares = [list(i)", "d[ai.args[0]][-1] += o inv = defaultdict(list) for k, v in", "Implies(x, y) Notes ===== The ``>>`` and ``<<`` operators are", "# file). 
S.true = true S.false = false converter[bool] =", "b`` and ``Xor(a, b)`` will be different if ``a`` and", "argset.remove(true) if true in argset else argset.add(true) for a, b", "arg in args] argset = set(args) for x in args:", "Examples ======== >>> (a >> b).equals(~b >> ~a) True >>>", "an object for which logic operations make sense.\"\"\" def __and__(self,", "elif len(argset) == 1: return argset.pop() elif True in argset:", "0: temp.append(Not(variables[i])) elif m == 1: temp.append(variables[i]) return And(*temp) def", "Accepts two Boolean arguments; A and B. Returns False if", "x in args: if isinstance(x, Number) or x in [True,", "newterm[index] = 3 if newterm not in simplified_terms: simplified_terms.append(newterm) simplified_terms.extend(", "or B == true or B == false: return Or(Not(A),", "Equivalent(A, B) is True iff A and B are both", "list of the disjuncts in the sentence s. Examples ========", "__rlshift__ = __rshift__ def __xor__(self, other): return Xor(self, other) __rxor__", "cj = rel[j][:2] if cj == nc: odd = ~odd", "eq2 = Or(And(Not(c), a), And(Not(c), d), And(b, c)) >>> bool_map(eq1,", "_is_form(expr, function1, function2): \"\"\"Test whether or not an expression is", "i.e., 0, which has a truth value of False. To", "arg: continue else: arg = true if isinstance(arg, Xor): for", "bool1, and the mapping of variables that makes the two", "(c | ~a) & (c | ~b) >>> to_cnf((a |", ">>> Nand(x, y) ~(x & y) \"\"\" @classmethod def eval(cls,", "in variables] if form == 'dnf' or \\ (form is", "eliminate redundant pairs, and return the essential arguments. \"\"\" essential", "in Negation Normal Form. A logical expression is in Negation", "self.has(Relational) or other.has(Relational): raise NotImplementedError('handling of relationals') return self.atoms() ==", "[] for t in product([0, 1], repeat=len(variables)): t = list(t)", "module for Diofant. 
\"\"\" from collections import defaultdict from itertools", "lit in cls.args: if isinstance(lit, Not): if not lit.args[0].is_Atom: return", "index, else returns -1. \"\"\" index = -1 for x,", "the expr is evaluated to its simplest DNF form. Examples", "Examples ======== >>> And(x < 2, x > -2).as_set() (-2,", "of products\" or \"SOP\" form) that gives the desired outcome.", "False), And(True, True), Xor(True, True)) true >>> ITE(x, y, z)", "dnf only') def _finger(eq): \"\"\" Assign a 5-item fingerprint to", "is_nnf(expr, simplified=True): \"\"\" Checks if expr is in Negation Normal", "& ~y >>> sympify(b) (z & ~x & ~y) |", "True) false >>> Nor(False, True) false >>> Nor(False, False) true", "for x in info[0].args)))) else: return info[0] def to_nnf(expr, simplify=True):", "but note that their use here is different from their", "range(1, len(symbols) + 1))) def append_symbol(arg, symbols): if isinstance(arg, Not):", "them as a list, too. The result will be one", "nc: return false elif cj == c: remove.append((r, rj)) break", "``b`` are integers. >>> And(x, y).subs({x: 1}) y \"\"\" zero", "Given a sentence s consisting of conjunctions and disjunctions of", "respect to info[0].\"\"\" if isinstance(info[0], info[2]): for arg in info[0].args:", "1], ... [0, 1, 1, 1], [1, 0, 1, 1],", ">>> Not(False) true >>> Not(And(True, False)) true >>> Not(Or(True, False))", "-1. \"\"\" index = -1 for x, (i, j) in", "f1: if k not in f2 or len(f1[k]) != len(f2[k]):", "remember is that ``sympify(True)`` returns ``true``. This means that for", "for arg in self.args]) else: raise NotImplementedError('Sorry, Or.as_set has not", "= [list(i) for i in (dontcares or [])] for d", "temporary = [] for y in l1: if _compare_term(x, y):", "is_dnf(expr): \"\"\" Test whether or not an expression is in", "sentence s to conjunctive normal form. That is, of the", "if they are all False. 
Examples ======== >>> x |", "A.is_Relational and B.is_Relational: if A.canonical == B.canonical: return true elif", "| ~a) & (c | ~b) >>> to_cnf((a | b)", "the smallest Sum of Products form. The variables must be", ">>> distribute_or_over_and(And(Or(Not(a), b), c)) (b & c) | (c &", "as_set(self): \"\"\" Rewrite logic operators and relationals in terms of", "in disjunctive normal form. Examples ======== >>> is_dnf(a | b", "object ends up in the ``.args`` of any expression, then", "for z in l1: # pragma: no branch if _compare_term(x,", "be integers, ``True >> True`` will be the same as", "self.args]) else: raise NotImplementedError('Sorry, And.as_set has not yet been' '", "else true if arg.is_Not: return arg.args[0] # Simplify Relational objects.", "simplify=True): return self._to_nnf(*self.args, simplify=simplify) @classmethod def _to_nnf(cls, *args, **kwargs): simplify", "this is a workaround that is valid for simplified boolean", "a term in the expansion of a function from binary", "module. The primary advantage of using true instead of True", "return the essential arguments. \"\"\" essential = [] for x", "combinations, product from ..core import Atom, cacheit from ..core.expr import", "(~c | ~d) >>> to_nnf(Equivalent(a >> b, b >> a))", "integers, ``True >> True`` will be the same as ``1", "ends up in the ``.args`` of any expression, then it", "since bools in Python subclass from ``int``, ``~True`` is the", "False return True def eliminate_implications(expr): \"\"\" Change >>, <<, and", "5 for fi in f} for a in eq.args: if", "function2} # get the fingerprint dictionaries f1 = _finger(function1) f2", "if arg.is_Not: return arg.args[0] # Simplify Relational objects. if isinstance(arg,", "to_nnf(self, simplify=True): return self._to_nnf(*self.args, simplify=simplify) @classmethod def _to_nnf(cls, *args, **kwargs):", "end class definitions. 
Some useful methods def conjuncts(expr): \"\"\"Return a", "In other words, use ``true`` only on those contexts where", "True def __hash__(self): return hash(True) def as_set(self): \"\"\" Rewrite logic", "return False return True def _rem_redundancy(l1, terms): \"\"\" After the", "comparison, and it will work in either case. Finally, for", "variables = _find_predicates(expr) truthtable = [] for t in product([0,", "~b) & (b | ~a) >>> eliminate_implications(Equivalent(a, b, c)) (a", "S.true = true S.false = false converter[bool] = lambda x:", "class Implies(BooleanFunction): \"\"\" Logical implication. A implies B is equivalent", "any of the arguments are False. Returns False if all", "false >>> Not(False) true >>> Not(And(True, False)) true >>> Not(Or(True,", "logic. When in doubt, use ``True``. \"``true == True is", "return Not(And(*args)) class Nor(BooleanFunction): \"\"\" Logical NOR function. It evaluates", "t = list(t) if (t not in minterms) and (t", "symbol in the equation: [ # of times it appeared", "throughout Diofant. An important thing to remember is that ``sympify(True)``", "evaluates and returns the result of B if A is", "False) true >>> Xor(True, True) false >>> Xor(True, False, True,", "if x else False) else: newargs.append(x) A, B = newargs", "false >>> Xor(True, False, True, True, False) true >>> Xor(True,", "| ~c) & (b | ~a) & (c | ~b)", "case. Finally, for boolean flags, it's better to just use", "in expr.args)) def simplify_logic(expr, form=None, deep=True): \"\"\" This function simplifies", "See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`. 
Examples ======== >>> sympify(False) false >>>", "return False else: if not lit.is_Atom: return False return True", "a, b: b, c: d, d: x}) \"\"\" def match(function1,", "Yes: ``if greeting:`` * No: ``if greeting == True:`` *", "in range(i + 1, len(rel)): rj, cj = rel[j][:2] if", "import converter, sympify from ..utilities import ordered class Boolean(Expr): \"\"\"A", "a propositional logical sentence s to conjunctive normal form. That", "this issue, use the Diofant objects ``true`` and ``false``. >>>", "either an And (or an Or) whose arguments are either", "cj == c: break else: continue remove.append((r, rj)) if odd:", "operand(s) used for an Implies ' f'(pairs are required): {args!s}')", "not is_literal(expr): return False return True def is_cnf(expr): \"\"\" Test", "& ~z, {y: a, z: b}) The results are not", ">>> bool_map(eq1, eq2) ((x & y) | (t & ~y)", "when they evaluate to false. Notes ===== See note in", "in f1: if k not in f2 or len(f1[k]) !=", "cls.is_Atom: continue if isinstance(cls, Not): if not cls.args[0].is_Atom: return False", "to its simplified version in SOP or POS form. The", "= True @property def canonical(self): return self def __int__(self): return", "\"``true == True``\" is ``True``, so if there is any", "i in (dontcares or [])] for d in dontcares: if", "(-2, 2) \"\"\" from ..sets import Intersection if len(self.free_symbols) ==", "things if ``a`` and ``b`` are integers. >>> Or(x, y).subs({x:", "operators are provided as a convenience, but note that their", "you can just use ``True`` and it will automatically be", "dontcares=None): \"\"\" The SOPform function uses simplified_pairs and a redundant", "def __new__(cls, *args, **kwargs): argset = set() obj = super().__new__(cls,", "\"\"\"Return a list of the conjuncts in the expr s.", "immediately if any of them are False, and True if", "for s in args] result.append(Or(*clause)) return And._to_nnf(*result, simplify=simplify) if func", "DNF form. 
Examples ======== >>> to_dnf(b & (a | c))", "sympify(other) if self.has(Relational) or other.has(Relational): raise NotImplementedError('handling of relationals') return", "other.atoms() and \\ not satisfiable(Not(Equivalent(self, other))) class BooleanAtom(Atom, Boolean): \"\"\"Base", "``Xor(a, b)`` will be different if ``a`` and ``b`` are", "argset.remove(a) argset.remove(b) if len(argset) == 0: return false elif len(argset)", "a, b in zip(self.args, self.args[1:]): args.append(Or(~a, b)) args.append(Or(~self.args[-1], self.args[0])) return", "... [0, 1, 1, 1], [1, 0, 1, 1], [1,", "..core.singleton import SingletonWithManagedProperties as Singleton from ..core.sympify import converter, sympify", ">>> is_literal(Or(a, b)) False \"\"\" if isinstance(expr, Not): return not", "def _convert_to_varsSOP(minterm, variables): \"\"\" Converts a term in the expansion", "len(self.free_symbols) == 1: return Union(*[arg.as_set() for arg in self.args]) else:", "maxterms = [] for t in product([0, 1], repeat=len(variables)): t", "of ``Integer(1)``. The rule of thumb is: \"If the boolean", "form == 'dnf' or \\ (form is None and len(truthtable)", "Xor(True, True) false >>> Xor(True, False, True, True, False) true", "\"\"\" zero = false identity = true nargs = None", "frozenset([a, b]) True >>> disjuncts(a & b) == frozenset([And(a, b)])", "mathematical, so it should return ``True``. The assumptions system should", "return B else: return Expr.__new__(cls, *args) def to_nnf(self, simplify=True): a,", "[[1, 0, 1], [1, 0, 0]]) >>> bool_map(function1, function2) (y", "yet been' ' implemented for multivariate' ' expressions') class Not(BooleanFunction):", "the Diofant objects ``true`` and ``false``. 
>>> True >> False", "(b & c) | (c & ~a) \"\"\" return _distribute((expr,", "if len(self.free_symbols) == 1: return Union(*[arg.as_set() for arg in self.args])", "y in essential: if _compare_term(x, y): break else: for z", "do the comparison, and it will work in either case.", "eval(cls, *args): return Not(And(*args)) class Nor(BooleanFunction): \"\"\" Logical NOR function.", "in simplified_terms: simplified_terms.append(newterm) simplified_terms.extend( [terms[i] for i in [_ for", "is bitwise or. Hence, ``Or(a, b)`` and ``a | b``", "is_cnf((a & b) | c) False \"\"\" return _is_form(expr, And,", "& b) | (a & ~b) | (b & c)", "rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, Or) def as_set(self): \"\"\" Rewrite logic", "representation of truth. For example, if the object ends up", "class Boolean(Expr): \"\"\"A boolean object is an object for which", "eval(cls, arg): from ..core import (Equality, GreaterThan, LessThan, StrictGreaterThan, StrictLessThan,", ">>> ~True -2 >>> Or(True, False) true See Also ========", "# Special case of a single expression of function2 if", "of BooleanTrue and BooleanFalse.\"\"\" is_Boolean = True @property def canonical(self):", "that can be ignored, pass them as a list, too.", "order, giving True immediately if any of them are False,", "a == false: return c elif b == c: return", "simplify=simplify) def _eval_derivative(self, x): return self.func(self.args[0], *[a.diff(x) for a in", "on this class, whereas with False they act bitwise on", "means that for the most part, you can just use", "not lit.args[0].is_Atom: return False else: if not lit.is_Atom: return False", "expr is evaluated to its simplest DNF form. Examples ========", "and ``false``. 
>>> True >> False 1 >>> true >>", "in l1: # pragma: no branch if _compare_term(x, z): assert", "return tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args = [] for i", "~c) & (b | ~a) & (c | ~b) \"\"\"", "y) is logically equivalent to And(a, b) for the mapping", "And(Not(y), b), And(x, y)) >>> dict(_finger(eq)) {(0, 0, 1, 0,", "True they act bitwise on 1. Functions in the logic", "``a & b`` will return different things if ``a`` and", "~x ~x >>> Not(And(Or(x, y), Or(~x, ~y))) ~((x | y)", "Examples ======== >>> sympify(False) false >>> false >> false true", "appeared as a Not(Symbol) in an And or Or, sum", "is, of the form ((A & ~B & ...) |", "in [True, False]: # Includes 0, 1 argset.discard(x) argset.add(True if", "elif A.is_Relational and B.is_Relational: if A.canonical == B.canonical: return true", "the above rule of thumb, the assumptions system uses a", "The ``^`` operator is provided as a convenience, but note", "' f'(pairs are required): {args!s}') if A == true or", "false >>> Nor(False, False) true >>> Nor(x, y) ~(x |", "of the arguments are True and the rest are False.", "_new_args_filter(cls, args): newargs = [] rel = [] for x", "Not(Xor(*argset)) else: obj._args = tuple(ordered(argset)) obj._argset = frozenset(argset) return obj", "expr is evaluated to its simplest CNF form. Examples ========", "x in essential]) def POSform(variables, minterms, dontcares=None): \"\"\" The POSform", "terms of real sets. 
Examples ======== >>> Not(x > 0,", "True >>> conjuncts(a | b) == frozenset([Or(a, b)]) True \"\"\"", "in neg else s for s in self.args] args.append(Or(*clause)) return", "Examples ======== >>> to_dnf(b & (a | c)) (a &", "form is None: expr = sympify(expr) if not isinstance(expr, BooleanFunction):", "len(f2[k]): return for i, x in enumerate(f1[k]): matchdict[x] = f2[k][i]", "uses simplified_pairs and a redundant group- eliminating algorithm to convert", "r in argset if r.is_Relational] odd = False # is", "(True, False): if not arg: continue else: arg = true", "implication. A implies B is equivalent to !A v B", "c)) (a & b) | (b & c) >>> to_dnf((a", "arg in args: if Not(arg) in args: return False stack.extend(expr.args)", "it's variable form (for POS). \"\"\" temp = [] for", "argset.pop() elif True in argset: argset.remove(True) return Not(Xor(*argset)) else: obj._args", "relation. Equivalent(A, B) is True iff A and B are", "for y in essential: if _compare_term(x, y): break else: for", "argset: argset.discard(True) return And(*argset) if False in argset: argset.discard(False) return", "b) == frozenset([a, b]) True >>> disjuncts(a & b) ==", "this class when they evaluate to false. Notes ===== See", "cls(*argset) class And(LatticeOp, BooleanFunction): \"\"\" Logical AND function. It evaluates", "function from binary to it's variable form (for SOP). \"\"\"", "import simplify variables = [simplify(v) for v in variables] if", "implies B is equivalent to !A v B Accepts two", "b | c) True >>> is_cnf(a & b & c)", "Not(Symbol) as 2 ] >>> eq = Or(And(Not(y), a), And(Not(y),", "of a single expression of function2 if isinstance(expr, function2): for", "objects ``true`` and ``false``. 
>>> True >> False 1 >>>", "Not) for ai in a.args) for ai in a.args: if", "\"If the boolean in question can be replaced by an", "Examples ======== >>> distribute_or_over_and(And(Or(Not(a), b), c)) (b & c) |", "| ~b) & (b | ~a) >>> eliminate_implications(Equivalent(a, b, c))", "return Implies(self, other) def __lshift__(self, other): \"\"\"Overloading for << operator.\"\"\"", "conditions. Examples ======== >>> minterms = [[0, 0, 0, 1],", "and when ``true`` should be used in various contexts throughout", "True) a | b \"\"\" expr = sympify(expr) if not", "is_cnf(expr): \"\"\" Test whether or not an expression is in", "def _compare_term(minterm, term): \"\"\" Return True if a binary term", "= f2[k][i] return matchdict if matchdict else None a =", "quick checks if len(f1) != len(f2): return # assemble the", "inv[tuple(v)].append(k) return inv def bool_map(bool1, bool2): \"\"\" Return the simplified", "\"\"\"Boolean function is a function that lives in a boolean", "ITE(x, y, z) ITE(x, y, z) >>> ITE(True, x, y)", "B if A is true else it returns the result", "EmptySet() \"\"\" from ..sets import EmptySet return EmptySet() true =", "None new = maxterms + dontcares while new != old:", "0, 1, 0, 2): [x], (0, 0, 1, 0, 3):", "for k, v in ordered(d.items()): inv[tuple(v)].append(k) return inv def bool_map(bool1,", "= minterms + dontcares while new != old: old =", "StrictGreaterThan(*arg.args) if isinstance(arg, GreaterThan): return StrictLessThan(*arg.args) def as_set(self): \"\"\" Rewrite", "Expr from ..core.function import Application from ..core.numbers import Number from", "that generate '1' (the minterms) into the smallest Sum of", "(z & ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables =", "not None]]) return simplified_terms def _compare_term(minterm, term): \"\"\" Return True", "from ..core.relational import Relational from .inference import satisfiable other =", "Or, # of times it appeared as a Not(Symbol) in", "======== 
>>> to_int_repr([x | y, y], [x, y]) [{1, 2},", "other): \"\"\"Overloading for << operator.\"\"\" return Implies(other, self) __rrshift__ =", "\"\"\" zero = true identity = false @classmethod def _new_args_filter(cls,", "in dontcares: if d in minterms: raise ValueError(f'{d} in minterms", "a two-valued logic. When in doubt, use ``True``. \"``true ==", "\\ (form is None and len(truthtable) >= (2 ** (len(variables)", "+ b) True >>> is_literal(Or(a, b)) False \"\"\" if isinstance(expr,", "d in dontcares: if d in minterms: raise ValueError(f'{d} in", "0, 1 argset.discard(x) argset.add(True if x else False) rel =", "Diofant. Parameters ========== expr : string or boolean expression form", "= BooleanTrue() false: BooleanFalse = BooleanFalse() # We want S.true", "s for s in self.args] args.append(Or(*clause)) return And._to_nnf(*args, simplify=simplify) class", "expr = stack.pop() if expr.func in (And, Or): if simplified:", "remove = [] for i, (r, c, nc) in enumerate(rel):", "or \"POS\" form) that gives the desired outcome. If there", "if not expr.args[0].is_Atom: return False if not isinstance(expr, function1): return", "and ``Xor(a, b)`` will be different if ``a`` and ``b``", "And(Not(y), z), And(x, y)) >>> eq2 = Or(And(Not(c), a), And(Not(c),", "default). 
deep : boolean (default True) indicates whether to recursively", "= [sympify(v) for v in variables] if minterms == []:", ">>> ~true false \"\"\" is_Not = True @classmethod def eval(cls,", "Notes ===== The ``>>`` and ``<<`` operators are provided as", "return Equality(*arg.args) if isinstance(arg, StrictLessThan): return GreaterThan(*arg.args) if isinstance(arg, StrictGreaterThan):", "Or) def is_dnf(expr): \"\"\" Test whether or not an expression", "2): for neg in combinations(args, i): clause = [~s if", "function2 if isinstance(expr, function2): for lit in expr.args: if isinstance(lit,", "== 'dnf' or form is None: expr = sympify(expr) if", "mapping that equates variables between two simplified boolean expressions if", "if function1.is_Symbol: return {function1: function2} # get the fingerprint dictionaries", "return int(bool(self)) class BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of True, a", "function2) (y & ~z, {y: a, z: b}) The results", ">>> to_dnf(b & (a | c)) (a & b) |", "Convert a propositional logical sentence s to disjunctive normal form.", "an expression that is equivalent to s, but has only", "its normal use in Python, which is bitwise and. Hence,", "to how you can generally use 1 instead of ``Integer(1)``.", "== Implies: a, b = args return And._to_nnf(a, ~b, simplify=simplify)", "not satisfiable(Not(Equivalent(self, other))) class BooleanAtom(Atom, Boolean): \"\"\"Base class of BooleanTrue", ">>> Implies(False, True) true >>> x >> y Implies(x, y)", "use in Python, which is bitwise or. Hence, ``Or(a, b)``", "And, Or) def is_dnf(expr): \"\"\" Test whether or not an", "< 2, x > -2).as_set() (-2, 2) \"\"\" from ..sets", "recognizing prime implicants. \"\"\" for i, x in enumerate(term): if", "~z) >>> simplify_logic(_) ~x & ~y \"\"\" if form ==", "if func == Or: return And._to_nnf(*[~arg for arg in args],", "= defaultdict(list) for k, v in ordered(d.items()): inv[tuple(v)].append(k) return inv", "as the first argument. 
Return a logical Or function (i.e.,", "def conjuncts(expr): \"\"\"Return a list of the conjuncts in the", "of them are False, and True if they are all", "b).equals(~b >> ~a) True >>> Not(And(a, b, c)).equals(And(Not(a), Not(b), Not(c)))", "When in doubt, use ``True``. \"``true == True is True``.\"", "with one less variable in the terms using QM method.", "True, False) false >>> x ^ y Xor(x, y) Notes", "making the class and instance names the same causes some", "things if ``a`` and ``b`` are integers. >>> And(x, y).subs({x:", "[0, 0, 1, 0], [0, 1, 0, 1]] >>> POSform([t,", "odd = False # is number of complimentary pairs odd?", "& b) | c) False \"\"\" return _is_form(expr, And, Or)", "# Special case of a single negation if isinstance(expr, Not):", "len(argset) <= 1: return true if True in argset: argset.discard(True)", "of the form ((A & ~B & ...) | (B", "| (t & ~y) | (z & ~y), {t: a,", "a logical Or function (i.e., the \"sum of products\" or", "above rule of thumb, the assumptions system uses a three-valued", "of an Atom if expr.is_Atom: return True # Special case", "not in essential: essential.append(temporary[0]) for x in terms: for y", "``Boolean``, like ``Or(x, y)`` or ``x > 1``, use ``true``.", "b, c: d, d: x}) \"\"\" def match(function1, function2): \"\"\"Return", "anything within a BooleanFunction that is not a BooleanFunction itself.", ">>> bool_map(eq, eq.subs({c: x})) (c & d & (a |", "y)) >>> eq2 = Or(And(Not(c), a), And(Not(c), d), And(b, c))", ">> y Implies(x, y) >>> y << x Implies(x, y)", "= [[0, 0, 0, 1], [0, 0, 1, 1], ...", "but note that its use here is different from its", "The rule of thumb is: \"If the boolean in question", "new new = _simplified_pairs(old) essential = _rem_redundancy(new, minterms) return Or(*[_convert_to_varsSOP(x,", "b = (~x & ~y & ~z) | (~x &", "!= -1: todo[i] = todo[j_i + i + 1] =", "' implemented for multivariate' ' expressions') class Or(LatticeOp, BooleanFunction): \"\"\"", "func, args = expr.func, expr.args if func == And: 
return", ">>> Not(True) false >>> Not(False) true >>> Not(And(True, False)) true", "form. Examples ======== >>> is_cnf(a | b | c) True", "only one bit. If yes, returns index, else returns -1.", "newargs.append(True if x else False) else: newargs.append(x) A, B =", "in self.args] args.append(Or(*clause)) return And._to_nnf(*args, simplify=simplify) class Nand(BooleanFunction): \"\"\" Logical", "== And: return Or._to_nnf(*[~arg for arg in args], simplify=simplify) if", "True >>> is_dnf(a & (b | c)) False \"\"\" return", "a.args) for ai in a.args: if ai.is_Symbol: d[ai][2] += 1", "are True, and False if they are all False. Examples", "arg in argset]) _args = frozenset(argset) obj = super().__new__(cls, _args)", "if not isinstance(expr, BooleanFunction): return expr variables = _find_predicates(expr) truthtable", "not is_literal(arg): arg = arg.to_nnf(simplify) if simplify: if isinstance(arg, cls):", "See Also ======== BooleanTrue \"\"\" def __bool__(self): return False def", "if _ is not None]]) return simplified_terms def _compare_term(minterm, term):", ">>> Implies(False, False) true >>> Implies(True, True) true >>> Implies(False,", "a logical And function (i.e., the \"product of sums\" or", "is, return an expression that is equivalent to s, but", "Logical OR function It evaluates its arguments in order, giving", "Hence, ``Or(a, b)`` and ``a | b`` will return different", "class when they evaluate to false. 
Notes ===== See note", "a in arg.args: argset.remove(a) if a in argset else argset.add(a)", "info[1](*list(map(_distribute, ((x, info[1], info[2]) for x in info[0].args)))) else: return", "= self.args[0] func, args = expr.func, expr.args if func ==", "False) true >>> Nor(x, y) ~(x | y) \"\"\" @classmethod", "def __bool__(self): return False def __hash__(self): return hash(False) def as_set(self):", "return _is_form(expr, And, Or) def is_dnf(expr): \"\"\" Test whether or", "failing_expression=False): \"\"\" Returns True if the given formulas have the", "logical expression is in Negation Normal Form (NNF) if it", "if r.is_Relational] odd = False # is number of complimentary", "= [(r, r.canonical, (~r).canonical) for r in argset if r.is_Relational]", "``Basic``. On the other hand, ``==`` is not a symbolic", "'1' (the minterms) into the smallest Sum of Products form.", "to convert the list of all input combinations that generate", "satisfied by the given term. Used for recognizing prime implicants.", "conjuncts in the expr s. Examples ======== >>> conjuncts(a &", "many) functions that satisfy the conditions. Examples ======== >>> minterms", "And(Not(c), d), And(b, c)) >>> bool_map(eq1, eq2) ((x & y)", "list, too. The result will be one of the (perhaps", "True if the statement is False. Returns False if the", "5-item fingerprint to each symbol in the equation: [ #", "equivalent to s, but has only &, |, and ~", "frozenset([Or(a, b)]) True \"\"\" return And.make_args(expr) def disjuncts(expr): \"\"\"Return a", "with respect to info[0].\"\"\" if isinstance(info[0], info[2]): for arg in", "8): [y]} So y and x have unique fingerprints, but", "return this class when they evaluate to false. Notes =====", "in l1: if _compare_term(x, y): temporary.append(y) if len(temporary) == 1:", "use ``true`` only on those contexts where the boolean is", "use ``True`` and ``False``. 
Aside from not satisfying the above", "[[0, 0, 0, 1], [0, 0, 1, 1], [0, 1,", "~b | (a & ~b)) & (b | ~a |", "Xor(True, True)) true >>> ITE(x, y, z) ITE(x, y, z)", "return self.args[0].as_set().complement(S.Reals) else: raise NotImplementedError('Sorry, Not.as_set has not yet been'", "| (~x & ~y & ~z) >>> simplify_logic(_) ~x &", "in Python, which is bitwise xor. In particular, ``a ^", "AND function. It evaluates its arguments in order, giving False", "to be equal they must have the same literals. Examples", "maxterms.append(t) old = None new = maxterms + dontcares while", "set().union(*(_find_predicates(i) for i in expr.args)) def simplify_logic(expr, form=None, deep=True): \"\"\"", "By \"simplified\" we mean that a function has been denested", "in combinations(args, i): clause = [~s if s in neg", "True``.\" While \"``true is True``\" is ``False``, \"``true == True``\"", "case of an Atom if expr.is_Atom: return True # Special", "when ``True`` should be used and when ``true`` should be", "of all input combinations that generate '1' (the minterms) into", "argset: if isinstance(r, Relational): rel.append((r, r.canonical, (~r).canonical)) remove = []", "(default True) indicates whether to recursively simplify any non-boolean functions", "no such mapping exists, return False. Examples ======== >>> function1", "| y Notes ===== The ``|`` operator is provided as", "how you can generally use 1 instead of ``Integer(1)``. The", "primary advantage of using false instead of False is that", "| ~a), True) a | b \"\"\" expr = sympify(expr)", "table. For two formulas to be equal they must have", "self.args[1:]): args.append(Or(~a, b)) args.append(Or(~self.args[-1], self.args[0])) return And._to_nnf(*args, simplify=simplify) class ITE(BooleanFunction):", "used as a symbolic representation of truth. 
For example, if", "immediately if any of them are False, and False if", "\"\"\" @classmethod def eval(cls, *args): try: a, b, c =", "truth table has been sufficiently simplified, use the prime implicant", "function2): \"\"\"Return the mapping that equates variables between two simplified", "be the same as ``1 >> 1``, i.e., 0, which", "isinstance(r, Relational): rel.append((r, r.canonical, (~r).canonical)) remove = [] for i,", "c) True >>> is_dnf((a & b) | c) True >>>", "compare boolean values to ``True`` or ``False`` using ``==``. *", "other hand, ``==`` is not a symbolic operation in Diofant,", "simplify = kwargs.get('simplify', True) argset = set() for arg in", "False) true >>> Equivalent(True, False, False) false >>> Equivalent(x, And(x,", "(i, j) in enumerate(zip(minterm1, minterm2)): if i != j: if", "eliminate_implications(expr) return distribute_or_over_and(expr) def is_nnf(expr, simplified=True): \"\"\" Checks if expr", "simplified set of minterms with one less variable in the", "result will be one of the (perhaps many) functions that", "isinstance(arg, Not): return -symbols[arg.args[0]] else: return symbols[arg] return [{append_symbol(arg, symbols)", "false @classmethod def _new_args_filter(cls, args): newargs = [] rel =", "if any of the arguments are False. Returns False if", "StrictLessThan): return GreaterThan(*arg.args) if isinstance(arg, StrictGreaterThan): return LessThan(*arg.args) if isinstance(arg,", "x in enumerate(f1[k]): matchdict[x] = f2[k][i] return matchdict if matchdict", "a)) (a | ~b | (a & ~b)) & (b", "arg.args[0] # Simplify Relational objects. if isinstance(arg, Equality): return Unequality(*arg.args)", "it appeared as a Not(symbol), # of times it appeared", "from itertools import combinations, product from ..core import Atom, cacheit", "Or or And object in Diofant. Parameters ========== expr :", "# Simplify Relational objects. 
if isinstance(arg, Equality): return Unequality(*arg.args) if", "(0, 0, 1, 2, 8): [y]} So y and x", "z) ITE(x, y, z) >>> ITE(True, x, y) x >>>", "essential arguments. \"\"\" essential = [] for x in terms:", "normal form. Examples ======== >>> is_dnf(a | b | c)", "| C | ...) & ...). If simplify is True,", "if A == true or A == false or B", "x ^ y Xor(x, y) Notes ===== The ``^`` operator", "======== >>> sympify(True) true >>> ~true false >>> ~True -2", "Diofant, since it always returns ``True`` or ``False``, and does", "in the logic module will return this class when they", "represent a two-valued logic. When in doubt, use ``True``. \"``true", "__invert__(self): \"\"\"Overloading for ~ operator.\"\"\" return Not(self) def __rshift__(self, other):", "and Equivalent into &, |, and ~. That is, return", "for ai in a.args: if ai.is_Symbol: d[ai][2] += 1 d[ai][-1]", "instead of ``Integer(1)``. The rule of thumb is: \"If the", "| y x | y Notes ===== The ``|`` operator", "..core import Atom, cacheit from ..core.expr import Expr from ..core.function", "y) \"\"\" @classmethod def eval(cls, *args): return Not(And(*args)) class Nor(BooleanFunction):", "\"\"\" Logical AND function. It evaluates its arguments in order,", "A is True and B is False. Returns True otherwise.", "None]]) return simplified_terms def _compare_term(minterm, term): \"\"\" Return True if", "Xor(True, False) true >>> Xor(True, True) false >>> Xor(True, False,", "if the statement is False. Returns False if the statement", "Or(True, False) true See Also ======== BooleanFalse \"\"\" def __bool__(self):", "raise NotImplementedError('Sorry, And.as_set has not yet been' ' implemented for", "\"\"\" Test whether or not an expression is in disjunctive", "return True def is_cnf(expr): \"\"\" Test whether or not an", "| c) False >>> is_nnf((a >> b) & (b >>", "boolean expression form : string ('cnf' or 'dnf') or None", "def eval(cls, *args): try: a, b, c = args except", "class directly from this # file). 
S.true = true S.false", "contains no redundant clauses. Examples ======== >>> to_nnf(Not((~a & ~b)", "& (b | ~a) & (c | ~b) \"\"\" return", "func == Or: return And._to_nnf(*[~arg for arg in args], simplify=simplify)", "``a`` and ``b`` are integers. In particular, since Python considers", "if None, the answer is returned according to the form", "assemble the match dictionary if possible matchdict = {} for", "Returns True if the given formulas have the same truth", "x.is_Relational: c = x.canonical if c in rel: continue nc", "and True if they are all True. Examples ======== >>>", "is also in dontcares') old = None new = minterms", "rest = info[2](*[a for a in info[0].args if a is", "any doubt over whether a function or expression will return", "info[0] def to_nnf(expr, simplify=True): \"\"\" Converts expr to Negation Normal", "the rest are False. Returns False if an even number", "\"\"\" Returns True if expr is a literal, else False.", "function1 = SOPform([x, z, y], [[1, 0, 1], [0, 0,", "on this class, whereas with True they act bitwise on", "in argset: argset.discard(True) return And(*argset) if False in argset: argset.discard(False)", "CNF format and puts them into an integer representation. Examples", "else argset.add(a) elif arg in argset: argset.remove(arg) else: argset.add(arg) rel", "for y in l1: if _compare_term(x, y): temporary.append(y) if len(temporary)", "~y \"\"\" if form == 'cnf' or form == 'dnf'", "a single expression of function2 if isinstance(expr, function2): for lit", "self.args[1:]]) # end class definitions. 
Some useful methods def conjuncts(expr):", "_rem_redundancy(new, minterms) return Or(*[_convert_to_varsSOP(x, variables) for x in essential]) def", "arguments are either symbols (x), negated symbols (Not(x)), or Or", "to_int_repr(clauses, symbols): \"\"\" Takes clauses in CNF format and puts", "for neg in combinations(args, i): clause = [~s if s", "true See Also ======== BooleanTrue \"\"\" def __bool__(self): return False", "a BooleanFunction itself. \"\"\" if not isinstance(expr, BooleanFunction): return {expr}", "a pair of minterms differs by only one bit. If", "import Expr from ..core.function import Application from ..core.numbers import Number", "``.args`` must be ``Basic``. On the other hand, ``==`` is", "| c)) (a & b) | (b & c) >>>", "== True:`` * Worse: ``if greeting is True:`` Examples ========", "whose arguments are either symbols (x), negated symbols (Not(x)), or", "return True stack = [expr] while stack: expr = stack.pop()", "is_literal(expr): \"\"\" Returns True if expr is a literal, else", "& ~y) | (~x & ~y & ~z) >>> simplify_logic(_)", "\"\"\" return Or.make_args(expr) def distribute_and_over_or(expr): \"\"\" Given a sentence s", "if arg else true if arg.is_Not: return arg.args[0] # Simplify", "to_cnf(expr, simplify=False): \"\"\" Convert a propositional logical sentence s to", "itertools import combinations, product from ..core import Atom, cacheit from", "(a | c)) (a & b) | (b & c)", "-2 >>> Or(True, False) true See Also ======== BooleanFalse \"\"\"", "operation in Diofant, since it always returns ``True`` or ``False``,", "BooleanFalse.\"\"\" is_Boolean = True @property def canonical(self): return self def", "if m: return a, m return m is not None", "def append_symbol(arg, symbols): if isinstance(arg, Not): return -symbols[arg.args[0]] else: return", "y)`` or ``x > 1``, use ``true``. 
Otherwise, use ``True``\".", "isinstance(arg, StrictGreaterThan): return LessThan(*arg.args) if isinstance(arg, LessThan): return StrictGreaterThan(*arg.args) if", "and ``False``. Aside from not satisfying the above rule of", "from ..core.operations import LatticeOp from ..core.singleton import S from ..core.singleton", "other, failing_expression=False): \"\"\" Returns True if the given formulas have", "a, z: b}) The results are not necessarily unique, but", "same logical behaviour for some correspondence between the variables of", "same as ``~1`` which is ``-2``, which has a boolean", "y and x have unique fingerprints, but a and b", "``|`` operator is provided as a convenience, but note that", "return symbols[arg] return [{append_symbol(arg, symbols) for arg in Or.make_args(c)} for", "you can generally use 1 instead of ``Integer(1)``. The rule", "true elif A.is_Relational and B.is_Relational: if A.canonical == B.canonical: return", "S.false = false converter[bool] = lambda x: true if x", "if x else false class BooleanFunction(Application, Boolean): \"\"\"Boolean function is", "equals(self, other, failing_expression=False): \"\"\" Returns True if the given formulas", "pairs, and return the essential arguments. \"\"\" essential = []", ">> a)) False \"\"\" expr = sympify(expr) if is_literal(expr): return", "Number) or arg in (True, False): if not arg: continue", "To avoid this issue, use the Diofant objects ``true`` and", "@cacheit def args(self): return tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args =", "True if any of the arguments are False. Returns False", "\"\"\" Return the simplified version of bool1, and the mapping", "terms of real sets. Examples ======== >>> true.as_set() UniversalSet() \"\"\"", "bit. If yes, returns index, else returns -1. 
\"\"\" index", "| (c & d))) (a | b) & (~c |", "And, Or and Not, and Not is applied only to", "True), Xor(True, True)) true >>> ITE(x, y, z) ITE(x, y,", "b elif b == true and c == false: return", "for a in self.args[1:]]) # end class definitions. Some useful", "y) \"\"\" @classmethod def eval(cls, *args): return Not(Or(*args)) class Implies(BooleanFunction):", "And or Or, sum of the number of arguments with", "or Or (or an And) whose arguments are only symbols", "that can be accessed via ``false``. This is the Diofant", "&, |, and ~ as logical operators. Examples ======== >>>", "simplified boolean expressions. \"\"\" # do some quick checks if", "for i in expr.args)) def simplify_logic(expr, form=None, deep=True): \"\"\" This", "Returns False if all arguments are True. Examples ======== >>>", "is being used as a symbolic representation of truth. For", "b, b >> a)) (a | ~b | (a &", "of structural equality rather than mathematical, so it should return", "= arg break else: return info[0] rest = info[2](*[a for", "\"\"\" If then else clause. ITE(A, B, C) evaluates and", ">>> x | y x | y Notes ===== The", "todo if _ is not None]]) return simplified_terms def _compare_term(minterm,", "function from binary to it's variable form (for POS). \"\"\"", "of using true instead of True is that shorthand boolean", "x in args: if isinstance(x, Number) or x in (0,", "true >>> Xor(True, False, True, False) false >>> x ^", "def _is_form(expr, function1, function2): \"\"\"Test whether or not an expression", "of True is that shorthand boolean operations like ~ and", "and ``b`` are integers. In particular, since Python considers ``True``", "& (a | ~c) \"\"\" return _distribute((expr, And, Or)) def", "d[ai][2] += 1 d[ai][-1] += o else: d[ai.args[0]][3] += 1", "in todo if _ is not None]]) return simplified_terms def", "[list(i) for i in (dontcares or [])] for d in", "return {function1: function2} # get the fingerprint dictionaries f1 =", "from binary to it's variable form (for POS). 
\"\"\" temp", "enumerate(f1[k]): matchdict[x] = f2[k][i] return matchdict if matchdict else None", "whether or not an expression is in conjunctive normal form.", "info[1]): return info[1](*list(map(_distribute, ((x, info[1], info[2]) for x in info[0].args))))", "times it appeared as a Symbol in an And or", "literals. Examples ======== >>> (a >> b).equals(~b >> ~a) True", "simplify_logic(expr, 'dnf', True) # Don't convert unless we have to", "a, b in remove: argset.remove(a) argset.remove(b) if len(argset) == 0:", "from its normal use in Python, which is bitwise or.", "+= o else: d[ai.args[0]][3] += 1 d[ai.args[0]][-1] += o inv", "a Symbol in an And or Or, # of times", "no redundant clauses. Examples ======== >>> to_nnf(Not((~a & ~b) |", "otherwise. Examples ======== >>> Implies(True, False) false >>> Implies(False, False)", "BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of False, a singleton that can", "conjunctive normal form. That is, of the form ((A |", "If more than one mappings of this sort exist, one", "and \\ not satisfiable(Not(Equivalent(self, other))) class BooleanAtom(Atom, Boolean): \"\"\"Base class", "in conj.args)))) elif isinstance(info[0], info[1]): return info[1](*list(map(_distribute, ((x, info[1], info[2])", "This is used as base class for And, Or, Not,", "in BooleanFunctions. A logical predicate is defined here as anything", "implicants. \"\"\" for i, x in enumerate(term): if x not", "it appeared as a Symbol, # of times it appeared", "argset.add(a) elif arg in argset: argset.remove(arg) else: argset.add(arg) rel =", "info[1], info[2]) for c in conj.args)))) elif isinstance(info[0], info[1]): return", "to convert the list of all input combos that generate", "is satisfied by the given term. Used for recognizing prime", "only And, Or and Not, and Not is applied only", "m == 0: temp.append(Not(variables[i])) elif m == 1: temp.append(variables[i]) return", "version in SOP or POS form. 
The return type is", "else argset.add(true) for a, b in remove: argset.remove(a) argset.remove(b) if", "else: continue remove.append((r, rj)) if odd: argset.remove(true) if true in", "that the output is NOT simplified. Examples ======== >>> distribute_or_over_and(And(Or(Not(a),", "Not): return -symbols[arg.args[0]] else: return symbols[arg] return [{append_symbol(arg, symbols) for", "pragma: no branch return POSform(variables, truthtable) else: raise ValueError('form can", "whether or not an expression is in disjunctive normal form.", "of using false instead of False is that shorthand boolean", "the equation: [ # of times it appeared as a", "return Xor(self, other) __rxor__ = __xor__ def equals(self, other, failing_expression=False):", "2): for neg in combinations(self.args, i): clause = [~s if", "Not): if not cls.args[0].is_Atom: return False elif not isinstance(cls, function2):", "example, if the object ends up in the ``.args`` of", "in terms of real sets. Examples ======== >>> Or(x >", "_distribute((expr, Or, And)) def _distribute(info): \"\"\"Distributes info[1] over info[2] with", "is_literal(a + b) True >>> is_literal(Or(a, b)) False \"\"\" if", "list of the conjuncts in the expr s. Examples ========", "of the (perhaps many) functions that satisfy the conditions. Examples", "info[0] rest = info[2](*[a for a in info[0].args if a", "1], [0, 1, 1, 1], ... [1, 0, 1, 1],", "args: if isinstance(x, Number) or x in (0, 1): newargs.append(True", "in rel): return [true] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, Or) def", "[] rel = [] for x in args: if isinstance(x,", "is None: expr = sympify(expr) if not isinstance(expr, BooleanFunction): return", "obj._args = tuple(ordered(argset)) obj._argset = frozenset(argset) return obj @property #", "if possible. By \"simplified\" we mean that a function has", "simplified=True): \"\"\" Checks if expr is in Negation Normal Form.", "\"\"\" Logical XOR (exclusive OR) function. 
Returns True if an", "Or(True, False) true See Also ======== BooleanTrue \"\"\" def __bool__(self):", "function It evaluates its arguments in order, giving True immediately", "| (B & C & ...) | ...). If simplify", "is any doubt over whether a function or expression will", "a, b = self.args return Or._to_nnf(~a, b, simplify=simplify) class Equivalent(BooleanFunction):", ">>> to_cnf(~(a | b) | c) (c | ~a) &", "answer is returned according to the form with fewest args", "(c & d))) (a | b) & (~c | ~d)", "expressions bool1 and bool2 represent the same logical behaviour for", "= [[0, 0, 0, 1], [0, 0, 1, 1], [0,", "redundant clauses. Examples ======== >>> is_nnf(a & b | ~c)", ">>> Not(And(a, Not(a))).equals(Or(b, Not(b))) False \"\"\" from ..core.relational import Relational", "of relationals') return self.atoms() == other.atoms() and \\ not satisfiable(Not(Equivalent(self,", "True >>> is_literal(Or(a, b)) False \"\"\" if isinstance(expr, Not): return", "converter, sympify from ..utilities import ordered class Boolean(Expr): \"\"\"A boolean", "in argset: argset.remove(arg) else: argset.add(arg) rel = [(r, r.canonical, (~r).canonical)", "as 2 ] >>> eq = Or(And(Not(y), a), And(Not(y), b),", "form (for POS). \"\"\" temp = [] for i, m", "_simplified_pairs(old) essential = _rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x, variables) for x", "by only one bit. If yes, returns index, else returns", "== []: return false minterms = [list(i) for i in", "And(x, y).subs({x: 1}) y \"\"\" zero = false identity =", "False. Returns False if any argument is True. Returns True", "so it should return ``True``. The assumptions system should use", "such mapping exists, return False. Examples ======== >>> function1 =", "neg in combinations(args, i): clause = [~s if s in", ">> false true >>> False >> False 0 >>> Or(True,", "``True``, as elements of ``.args`` must be ``Basic``. 
On the", "return expr if simplify: return simplify_logic(expr, 'cnf', True) # Don't", "if s in neg else s for s in args]", "else False) continue if x.is_Relational: c = x.canonical if c", "``true`` or ``True``, just use ``==`` instead of ``is`` to", "if matchdict else None a = simplify_logic(bool1) b = simplify_logic(bool2)", "(b | c)) False \"\"\" return _is_form(expr, Or, And) def", "return expr.to_nnf(simplify) def to_cnf(expr, simplify=False): \"\"\" Convert a propositional logical", "different from its normal use in Python, which is bitwise", "is_dnf(a & (b | c)) False \"\"\" return _is_form(expr, Or,", "0]]) >>> bool_map(function1, function2) (y & ~z, {y: a, z:", "& (y | ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables", "one less variable in the terms using QM method. \"\"\"", "1``, use ``true``. Otherwise, use ``True``\". In other words, use", "or ``False`` using ``==``. * Yes: ``if greeting:`` * No:", "args: return False stack.extend(expr.args) elif not is_literal(expr): return False return", "the form with fewest args (in CNF by default). deep", "Or (or an And) whose arguments are only symbols or", "\"\"\"Overloading for | operator.\"\"\" return Or(self, other) __ror__ = __or__", "Normal Form (NNF) if it contains only And, Or and", "with fewest args (in CNF by default). deep : boolean", ">>> ITE(x, y, z) ITE(x, y, z) >>> ITE(True, x,", "identity = false @classmethod def _new_args_filter(cls, args): newargs = []", "is True:`` Examples ======== >>> sympify(True) true >>> ~true false", "variables must be given as the first argument. 
Return a", "Not(symbol), # of times it appeared as a Symbol in", "__or__(self, other): \"\"\"Overloading for | operator.\"\"\" return Or(self, other) __ror__", "| c)) False >>> is_nnf((a | ~a) & (b |", "+= 1 d[ai][-1] += o else: d[ai.args[0]][3] += 1 d[ai.args[0]][-1]", "of the required form.\"\"\" expr = sympify(expr) # Special case", "0 >>> Or(True, False) true See Also ======== BooleanTrue \"\"\"", "Takes clauses in CNF format and puts them into an", "and. Hence, ``And(a, b)`` and ``a & b`` will return", "True @property def canonical(self): return self def __int__(self): return int(bool(self))", "Or: return And._to_nnf(*[~arg for arg in args], simplify=simplify) if func", "StrictLessThan, Unequality) if isinstance(arg, Number) or arg in (True, False):", "and ``false`` represent a two-valued logic. When in doubt, use", "for recognizing prime implicants. \"\"\" for i, x in enumerate(term):", "is evaluated to its simplest DNF form. Examples ======== >>>", "True`` will be the same as ``1 >> 1``, i.e.,", "(2 ** (len(variables) - 1))): return SOPform(variables, truthtable) elif form", "def distribute_and_over_or(expr): \"\"\" Given a sentence s consisting of conjunctions", "Examples ======== >>> Xor(True, False) true >>> Xor(True, True) false", "else it returns the result of C. Examples ======== >>>", "z], minterms, dontcares) (y & z) | (z & ~t)", "Not(x > 0, evaluate=False).as_set() (-oo, 0] \"\"\" if len(self.free_symbols) ==", "in (dontcares or [])] for d in dontcares: if d", "term): \"\"\" Return True if a binary term is satisfied", "else False) else: newargs.append(x) A, B = newargs except ValueError:", "arguments are False. Returns False if all arguments are True.", "True. Returns True if all arguments are False. 
Examples ========", "return expr variables = _find_predicates(expr) truthtable = [] for t", "= None new = minterms + dontcares while new !=", "for mutivariate' ' expressions') def to_nnf(self, simplify=True): if is_literal(self): return", "y:b} or {x: b, y:a}. If no such mapping exists,", "Or(x, y).subs({x: 0}) y \"\"\" zero = true identity =", "can be cnf or dnf only') def _finger(eq): \"\"\" Assign", "nc for r in rel): return [true] rel.append(c) newargs.append(x) return", "to false. Notes ===== See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples ========", "= {fi: [0] * 5 for fi in f} for", "b)`` and ``a >> b`` will return different things if", "= False # is number of complimentary pairs odd? start", "break else: return info[0] rest = info[2](*[a for a in", "Examples ======== >>> conjuncts(a & b) == frozenset([a, b]) True", "if isinstance(arg, LessThan): return StrictGreaterThan(*arg.args) if isinstance(arg, GreaterThan): return StrictLessThan(*arg.args)", "variables = [simplify(v) for v in variables] if form ==", "in info[0].args if a is not conj]) return info[1](*list(map(_distribute, ((info[2](c,", "``a | b`` will return different things if ``a`` and", "simplify: if isinstance(arg, cls): arg = arg.args else: arg =", ">> True`` will be the same as ``1 >> 1``,", "== true or A == false or B == true", "c)).equals(And(Not(a), Not(b), Not(c))) False >>> Not(And(a, Not(a))).equals(Or(b, Not(b))) False \"\"\"", "False, for use in the logic module. The primary advantage", "False \"\"\" return _is_form(expr, And, Or) def is_dnf(expr): \"\"\" Test", "\"SOP\" form) that gives the desired outcome. 
If there are", "for arg in args], simplify=simplify) if func == Or: return", "``^`` operator is provided as a convenience, but note that", "when ``true`` should be used in various contexts throughout Diofant.", "in self.args]) else: raise NotImplementedError('Sorry, And.as_set has not yet been'", "newargs = [] rel = [] for x in args:", "symbols = dict(zip(symbols, range(1, len(symbols) + 1))) def append_symbol(arg, symbols):", "true S.false = false converter[bool] = lambda x: true if", "``a`` and ``b`` are integers. >>> Or(x, y).subs({x: 0}) y", "((x & y) | (t & ~y) | (z &", "i, m in enumerate(maxterm): if m == 1: temp.append(Not(variables[i])) elif", "iff A and B are both True or both False.", "expansion of a function from binary to it's variable form", "& (B | C | ...) & ...). If simplify", "other) __rxor__ = __xor__ def equals(self, other, failing_expression=False): \"\"\" Returns", "are integers. >>> Xor(x, y).subs({y: 0}) x \"\"\" def __new__(cls,", "stack: expr = stack.pop() if expr.func in (And, Or): if", "arguments are True and the rest are False. Examples ========", ">>> bool_map(function1, function2) (y & ~z, {y: a, z: b})", "True >>> is_dnf((a & b) | c) True >>> is_dnf(a", "isinstance(arg, Equality): return Unequality(*arg.args) if isinstance(arg, Unequality): return Equality(*arg.args) if", "new = _simplified_pairs(old) essential = _rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x, variables)", "a)) False \"\"\" expr = sympify(expr) if is_literal(expr): return True", "return True def _rem_redundancy(l1, terms): \"\"\" After the truth table", "simplest CNF form. 
Examples ======== >>> to_cnf(~(a | b) |", "that generate '1' (the minterms) into the smallest Product of", "\"\"\"Distributes info[1] over info[2] with respect to info[0].\"\"\" if isinstance(info[0],", "product([0, 1], repeat=len(variables)): t = list(t) if expr.xreplace(dict(zip(variables, t))): truthtable.append(t)", ">> False 0 >>> Or(True, False) true See Also ========", "enumerate(terms[(i + 1):]): index = _check_pair(ti, tj) if index !=", "a Symbol, # of times it appeared as a Not(symbol),", "in Diofant, since it always returns ``True`` or ``False``, and", "(~A).canonical == B.canonical: return B else: return Expr.__new__(cls, *args) def", "| b) | c) (c | ~a) & (c |", "@classmethod def eval(cls, *args): try: newargs = [] for x", "is_cnf(a & b & c) True >>> is_cnf((a & b)", "neg else s for s in args] result.append(Or(*clause)) return And._to_nnf(*result,", "x in reversed(list(args)): if isinstance(x, Number) or x in (0,", "expr : string or boolean expression form : string ('cnf'", "to work, rather than S.BooleanTrue and # S.BooleanFalse, but making", "...) | ...). If simplify is True, the expr is", "= arg.to_nnf(simplify) if simplify: if isinstance(arg, cls): arg = arg.args", "for x in args: if isinstance(x, Number) or x in", "is ``-2``, which has a boolean value of True. To", "the disjuncts in the sentence s. Examples ======== >>> disjuncts(a", "False # is number of complimentary pairs odd? start 0", "fewest args (in CNF by default). deep : boolean (default", ">>> ITE(x, y, y) y \"\"\" @classmethod def eval(cls, *args):", "return And(*temp) def _convert_to_varsPOS(maxterm, variables): \"\"\" Converts a term in", "neg else s for s in self.args] args.append(Or(*clause)) return And._to_nnf(*args,", "times it appeared as a Not(symbol), # of times it", "Returns False if any argument is True. Returns True if", "given term. Used for recognizing prime implicants. 
\"\"\" for i,", "it will automatically be converted to ``true`` when necessary, similar", "False) true >>> Xor(True, False, True, False) false >>> x", "and return the essential arguments. \"\"\" essential = [] for", "if not lit.is_Atom: return False return True # Special case", "is an object for which logic operations make sense.\"\"\" def", "& b & c) True >>> is_cnf((a & b) |", "False) true >>> Implies(True, True) true >>> Implies(False, True) true", "y).subs({x: 0}) y \"\"\" zero = true identity = false", "real sets. Examples ======== >>> And(x < 2, x >", "= [sympify(arg, strict=True) for arg in args] argset = set(args)", "== Xor: result = [] for i in range(1, len(args)+1,", "version of bool1, and the mapping of variables that makes", "sort exist, one of them is returned. For example, And(x,", "\"\"\"A boolean object is an object for which logic operations", "Union(*[arg.as_set() for arg in self.args]) else: raise NotImplementedError('Sorry, Or.as_set has", "= [simplify(v) for v in variables] if form == 'dnf'", "arbitrary symbolic ``Boolean``, like ``Or(x, y)`` or ``x > 1``,", "1], [1, 0, 0]]) >>> bool_map(function1, function2) (y & ~z,", "f2 or len(f1[k]) != len(f2[k]): return for i, x in", "x & y x & y Notes ===== The ``&``", "newargs = [] rel = [] for x in reversed(list(args)):", "module. The primary advantage of using false instead of False", "for c in clauses] def _check_pair(minterm1, minterm2): \"\"\" Checks if", "return True def __hash__(self): return hash(True) def as_set(self): \"\"\" Rewrite", "1 d[ai][-1] += o else: d[ai.args[0]][3] += 1 d[ai.args[0]][-1] +=", "match(function1, function2): \"\"\"Return the mapping that equates variables between two", "in (0, 1): newargs.append(True if x else False) continue if", "\"\"\" if form == 'cnf' or form == 'dnf' or", "argset else argset.add(true) for a, b in remove: argset.remove(a) argset.remove(b)", "enumerate(rel): for j in range(i + 1, len(rel)): rj, cj", "logic module. 
The primary advantage of using true instead of", "\"\"\" Converts expr to Negation Normal Form. A logical expression", "if not lit.is_Atom: return False return True def eliminate_implications(expr): \"\"\"", "work as expected on this class, whereas with True they", "generate '1' (the minterms) into the smallest Product of Sums", "table has been sufficiently simplified, use the prime implicant table", "and a redundant-group eliminating algorithm to convert the list of", "And) whose arguments are only symbols or negated symbols. For", ">>> distribute_and_over_or(Or(a, And(Not(b), Not(c)))) (a | ~b) & (a |", "minterms, dontcares=None): \"\"\" The POSform function uses simplified_pairs and a", "to !A v B Accepts two Boolean arguments; A and", "Number from ..core.operations import LatticeOp from ..core.singleton import S from", "this class, whereas with False they act bitwise on 0.", "in DNF. Note that the output is NOT simplified. Examples", "1, 0, 3): [a, b], (0, 0, 1, 2, 8):", "1):]): index = _check_pair(ti, tj) if index != -1: todo[i]", "A and B. Returns False if A is True and", "to_nnf(self, simplify=True): if is_literal(self): return self expr = self.args[0] func,", "c, And(c, d)) >>> bool_map(eq, eq.subs({c: x})) (c & d", "*args): return Not(Or(*args)) class Implies(BooleanFunction): \"\"\" Logical implication. 
A implies", "if _compare_term(x, z): assert z not in essential essential.append(z) break", "lit in expr.args: if isinstance(lit, Not): if not lit.args[0].is_Atom: return", "the list of all input combos that generate '1' (the", "NotImplementedError('Sorry, And.as_set has not yet been' ' implemented for multivariate'", "cj == c: remove.append((r, rj)) break for a, b in", "recognize and eliminate redundant pairs, and return the essential arguments.", "terms: for y in essential: if _compare_term(x, y): break else:", "EmptySet return EmptySet() true = BooleanTrue() false: BooleanFalse = BooleanFalse()", "true >>> ~true false >>> ~True -2 >>> Or(True, False)", "argset.discard(False) return And(*[~arg for arg in argset]) _args = frozenset(argset)", "= [] todo = list(range(len(terms))) for i, ti in enumerate(terms[:-1]):", "isinstance(expr, BooleanFunction): return expr variables = _find_predicates(expr) truthtable = []", "1], [0, 0, 1, 1], [0, 1, 1, 1], ...", "for r in argset if r.is_Relational] odd = False #", "'dnf', the simplest expression in the corresponding normal form is", "using true instead of True is that shorthand boolean operations", "is_cnf(expr): return expr expr = eliminate_implications(expr) return distribute_and_over_or(expr) def to_dnf(expr,", "simplify is True, the expr is evaluated to its simplest", "sense.\"\"\" def __and__(self, other): \"\"\"Overloading for & operator.\"\"\" return And(self,", "Examples ======== >>> Not(x > 0, evaluate=False).as_set() (-oo, 0] \"\"\"", "And._to_nnf(Or(*args), Or(*[~arg for arg in args]), simplify=simplify) if func ==", "...) | (B & C & ...) | ...). If", "inv def bool_map(bool1, bool2): \"\"\" Return the simplified version of", "in argset else argset.add(a) elif arg in argset: argset.remove(arg) else:", "isinstance(expr, BooleanFunction): return {expr} return set().union(*(_find_predicates(i) for i in expr.args))", "args (in CNF by default). 
deep : boolean (default True)", "in (0, 1): newargs.append(True if x else False) else: newargs.append(x)", "Equivalence relation. Equivalent(A, B) is True iff A and B", ">>> b = (~x & ~y & ~z) | (~x", "simplified is True, checks if result contains no redundant clauses.", "Not(Or(*args)) class Implies(BooleanFunction): \"\"\" Logical implication. A implies B is", "~b) >>> to_cnf((a | b) & (a | ~a), True)", "info[1]): conj = arg break else: return info[0] rest =", "be ``true`` instead of ``True``, as elements of ``.args`` must", "``int``, ``~True`` is the same as ``~1`` which is ``-2``,", "True) true >>> x >> y Implies(x, y) >>> y", "[ # of times it appeared as a Symbol, #", "but has only &, |, and ~ as logical operators.", "to disjunctive normal form. That is, of the form ((A", "are all False. Returns False if any argument is True.", "true >>> ITE(x, y, z) ITE(x, y, z) >>> ITE(True,", "in args: return False stack.extend(expr.args) elif not is_literal(expr): return False", "raise NotImplementedError('Sorry, Or.as_set has not yet been' ' implemented for", "return Union(*[arg.as_set() for arg in self.args]) else: raise NotImplementedError('Sorry, Or.as_set", "them is returned. For example, And(x, y) is logically equivalent", "j in range(i + 1, len(rel)): rj, cj = rel[j][:2]", "of real sets. Examples ======== >>> false.as_set() EmptySet() \"\"\" from", "``Integer(1)``. The rule of thumb is: \"If the boolean in", "integers. >>> And(x, y).subs({x: 1}) y \"\"\" zero = false", "be given as the first argument. Return a logical And", "Or(x, False))) ~x >>> ~x ~x >>> Not(And(Or(x, y), Or(~x,", "simplify any non-boolean functions contained within the input. Examples ========", "b) | c) (c | ~a) & (c | ~b)", "normal use in Python, which is bitwise or. 
Hence, ``Or(a,", "_args return obj @property # type: ignore[misc] @cacheit def args(self):", "| c) True >>> is_dnf(a & (b | c)) False", "NotImplementedError('Sorry, Not.as_set has not yet been' ' implemented for mutivariate'", "b) == frozenset([And(a, b)]) True \"\"\" return Or.make_args(expr) def distribute_and_over_or(expr):", "+= 1 d[ai.args[0]][-1] += o inv = defaultdict(list) for k,", "b, y: c, z: d}) >>> eq = And(Xor(a, b),", "b == true and c == false: return a elif", "None, the answer is returned according to the form with", "in argset: argset.discard(False) return And(*[~arg for arg in argset]) _args", "& ~y & z) >>> simplify_logic(b) ~x & ~y >>>", "\"\"\"Overloading for >> operator.\"\"\" return Implies(self, other) def __lshift__(self, other):", "liable to be some confusion as to when ``True`` should", "After the truth table has been sufficiently simplified, use the", ">>> is_dnf((a & b) | c) True >>> is_dnf(a &", "\"\"\" from ..sets import Union if len(self.free_symbols) == 1: return", "Don't convert unless we have to if is_dnf(expr): return expr", "argset.remove(b) if len(argset) == 0: return false elif len(argset) ==", "args]), simplify=simplify) if func == Xor: result = [] for", "operator is provided as a convenience, but note that its", "eliminating algorithm to convert the list of all input combos", "return expr expr = eliminate_implications(expr) return distribute_and_over_or(expr) def to_dnf(expr, simplify=False):", "operator.\"\"\" return Or(self, other) __ror__ = __or__ def __invert__(self): \"\"\"Overloading", "return matchdict if matchdict else None a = simplify_logic(bool1) b", "object is an object for which logic operations make sense.\"\"\"", "== 0: temp.append(Not(variables[i])) elif m == 1: temp.append(variables[i]) return And(*temp)", "= args except ValueError: raise ValueError('ITE expects exactly 3 arguments')", "(or an Or) whose arguments are either symbols (x), negated", "variables) for x in essential]) def POSform(variables, 
minterms, dontcares=None): \"\"\"", "0. Functions in the logic module will return this class", "doubt, use ``True``. \"``true == True is True``.\" While \"``true", "[a, b], (0, 0, 1, 2, 8): [y]} So y", "not arg: continue else: arg = true if isinstance(arg, Xor):", "Or, sum of the number of arguments with which it", "Notes ===== The ``~`` operator is provided as a convenience,", "of real sets. Examples ======== >>> Not(x > 0, evaluate=False).as_set()", "if result contains no redundant clauses. Examples ======== >>> is_nnf(a", "simplify=simplify) if func == Equivalent: return And._to_nnf(Or(*args), Or(*[~arg for arg", "in enumerate(zip(minterm1, minterm2)): if i != j: if index ==", "to each symbol in the equation: [ # of times", "of ``if x is True``. To quote PEP 8: Don't", "in arg.args: argset.remove(a) if a in argset else argset.add(a) elif", "note in :py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples ======== >>> sympify(False) false >>> false", "is_dnf(a & b & c) True >>> is_dnf((a & b)", "as anything within a BooleanFunction that is not a BooleanFunction", "===== The ``|`` operator is provided as a convenience, but", "SOPform(variables, truthtable) elif form == 'cnf' or form is None:", "and does so in terms of structural equality rather than", "argset.discard(x) argset.add(True if x else False) rel = [] for", "method to recognize and eliminate redundant pairs, and return the", "function. 
It evaluates its arguments in order, giving True immediately", "inv = defaultdict(list) for k, v in ordered(d.items()): inv[tuple(v)].append(k) return", "in enumerate(minterm): if m == 0: temp.append(Not(variables[i])) elif m ==", "(2, oo] \"\"\" from ..sets import Union if len(self.free_symbols) ==", "True) indicates whether to recursively simplify any non-boolean functions contained", "Equivalent(False, False, False) true >>> Equivalent(True, False, False) false >>>", "b, c], [[1, 0, 1], [1, 0, 0]]) >>> bool_map(function1,", "dontcares) z & (y | ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm", "in terms of real sets. Examples ======== >>> false.as_set() EmptySet()", "self.args] args.append(Or(*clause)) return And._to_nnf(*args, simplify=simplify) class Nand(BooleanFunction): \"\"\" Logical NAND", "given formulas have the same truth table. For two formulas", "clauses] def _check_pair(minterm1, minterm2): \"\"\" Checks if a pair of", "True # Special case of a single expression of function2", "True >>> is_nnf(Not(a & b) | c) False >>> is_nnf((a", "functions contained within the input. Examples ======== >>> b =", "Xor(True, False, True, True, False) true >>> Xor(True, False, True,", "expr.args: if isinstance(lit, Not): if not lit.args[0].is_Atom: return False else:", "True, a singleton that can be accessed via ``true``. This", "== false and c == true: return Not(a) def to_nnf(self,", "return index def _convert_to_varsSOP(minterm, variables): \"\"\" Converts a term in", "possible matchdict = {} for k in f1: if k", "c) False >>> is_nnf((a >> b) & (b >> a))", "Returns False otherwise. 
Examples ======== >>> Equivalent(False, False, False) true", "rel = [(r, r.canonical, (~r).canonical) for r in argset if", "simplify=False): \"\"\" Convert a propositional logical sentence s to conjunctive", "Or(x > 2, x < -2).as_set() [-oo, -2) U (2,", "<= 1: return true if True in argset: argset.discard(True) return", "expression is of the required form.\"\"\" expr = sympify(expr) #", "@classmethod def _new_args_filter(cls, args): newargs = [] rel = []", "oo] \"\"\" from ..sets import Union if len(self.free_symbols) == 1:", "have the same truth table. For two formulas to be", "True) false >>> Nor(False, False) true >>> Nor(x, y) ~(x", "an equivalent sentence in CNF. Examples ======== >>> distribute_and_over_or(Or(a, And(Not(b),", "~a) >>> eliminate_implications(Equivalent(a, b, c)) (a | ~c) & (b", "False for cls in expr.args: if cls.is_Atom: continue if isinstance(cls,", "0, 1, 0], [0, 1, 0, 1]] >>> POSform([t, x,", "variables that makes the two expressions bool1 and bool2 represent", "\"\"\" def __new__(cls, *args, **options): from ..core.relational import Relational args", "None newterm = ti[:] newterm[index] = 3 if newterm not", "class when they evaluate to true. Notes ===== There is", "to info[0].\"\"\" if isinstance(info[0], info[2]): for arg in info[0].args: if", "& c) True >>> is_dnf((a & b) | c) True", "Boolean arguments; A and B. Returns False if A is", "len(argset) == 1: return argset.pop() elif True in argset: argset.remove(True)", "if isinstance(arg, Not): return -symbols[arg.args[0]] else: return symbols[arg] return [{append_symbol(arg,", ">>> to_nnf(Not((~a & ~b) | (c & d))) (a |", "of all input combos that generate '1' (the minterms) into", "False if the statement is True. Examples ======== >>> Not(True)", "``False`` to be integers, ``True >> True`` will be the", "use in Python, which is bit shifts. Hence, ``Implies(a, b)``", "Form. 
A logical expression is in Negation Normal Form (NNF)", "info[0].args)))) else: return info[0] def to_nnf(expr, simplify=True): \"\"\" Converts expr", "Diofant objects ``true`` and ``false``. >>> True >> False 1", "maxterms + dontcares while new != old: old = new", "of a single negation if isinstance(expr, Not): if not expr.args[0].is_Atom:", "if Not(a) in argset: return cls.zero argset.add(a) else: argset.add(arg) return", "in the sentence s. Examples ======== >>> disjuncts(a | b)", "whereas with False they act bitwise on 0. Functions in", "than one mappings of this sort exist, one of them", "should use ``True`` and ``False``. Aside from not satisfying the", "can just use ``True`` and it will automatically be converted", "be ignored, pass them as a list, too. The result", "same truth table. For two formulas to be equal they", "for s in self.args] args.append(Or(*clause)) return And._to_nnf(*args, simplify=simplify) class Nand(BooleanFunction):", "\"\"\" def __bool__(self): return False def __hash__(self): return hash(False) def", "Not): if not lit.args[0].is_Atom: return False else: if not lit.is_Atom:", "return Unequality(*arg.args) if isinstance(arg, Unequality): return Equality(*arg.args) if isinstance(arg, StrictLessThan):", "m == 1: temp.append(variables[i]) return And(*temp) def _convert_to_varsPOS(maxterm, variables): \"\"\"", "And(Xor(a, b), c, And(c, d)) >>> bool_map(eq, eq.subs({c: x})) (c", "argset.remove(True) return Not(Xor(*argset)) else: obj._args = tuple(ordered(argset)) obj._argset = frozenset(argset)", "True, the expr is evaluated to its simplest DNF form.", "variables] if form == 'dnf' or \\ (form is None", "BooleanAtom(Atom, Boolean): \"\"\"Base class of BooleanTrue and BooleanFalse.\"\"\" is_Boolean =", "~y & ~z) >>> simplify_logic(_) ~x & ~y \"\"\" if", "BooleanFunction): return expr if simplify: return simplify_logic(expr, 'dnf', True) #", "And._to_nnf(Or(~a, b), Or(a, c), simplify=simplify) def _eval_derivative(self, x): return 
self.func(self.args[0],", "_check_pair(minterm1, minterm2): \"\"\" Checks if a pair of minterms differs", "for << operator.\"\"\" return Implies(other, self) __rrshift__ = __lshift__ __rlshift__", "operator.\"\"\" return And(self, other) __rand__ = __and__ def __or__(self, other):", "work in either case. Finally, for boolean flags, it's better", "integers. >>> Or(x, y).subs({x: 0}) y \"\"\" zero = true", "| y) \"\"\" @classmethod def eval(cls, *args): return Not(Or(*args)) class", "[[0, 0, 0, 0], [0, 0, 1, 0], [0, 1,", "dontcares = [list(i) for i in (dontcares or [])] for", "len(rel)): rj, cj = rel[j][:2] if cj == nc: odd", "y \"\"\" zero = false identity = true nargs =", "as a Not(symbol), # of times it appeared as a", "symbols): if isinstance(arg, Not): return -symbols[arg.args[0]] else: return symbols[arg] return", "y) & (~x | ~y)) Notes ===== The ``~`` operator", "Nor(True, True) false >>> Nor(False, True) false >>> Nor(False, False)", "the given term. Used for recognizing prime implicants. \"\"\" for", "x), Or(x, False))) ~x >>> ~x ~x >>> Not(And(Or(x, y),", "``is`` to do the comparison, and it will work in", "or 'dnf') or None (default). 
If 'cnf' or 'dnf', the", "string or boolean expression form : string ('cnf' or 'dnf')", "========== expr : string or boolean expression form : string", "confusion as to when ``True`` should be used and when", "len(truthtable) >= (2 ** (len(variables) - 1))): return SOPform(variables, truthtable)", "True) false >>> ITE(Or(True, False), And(True, True), Xor(True, True)) true", "= _finger(function1) f2 = _finger(function2) # more quick checks if", "other = sympify(other) if self.has(Relational) or other.has(Relational): raise NotImplementedError('handling of", "isinstance(expr, BooleanFunction) def to_int_repr(clauses, symbols): \"\"\" Takes clauses in CNF", "``(a, d)`` or ``(d, a)``: >>> eq1 = Or(And(Not(y), t),", "import Relational args = [sympify(arg, strict=True) for arg in args]", "i in range(1, len(args)+1, 2): for neg in combinations(args, i):", "info[1], info[2]) for x in info[0].args)))) else: return info[0] def", ">>> eq1 = Or(And(Not(y), t), And(Not(y), z), And(x, y)) >>>", "eliminate_implications(Equivalent(a, b, c)) (a | ~c) & (b | ~a)", "if A.canonical == B.canonical: return true elif (~A).canonical == B.canonical:", "``==`` instead of ``is`` to do the comparison, and it", "``And(a, b)`` and ``a & b`` will return different things", "odd? start 0 -> False remove = [] for i,", "& operator.\"\"\" return And(self, other) __rand__ = __and__ def __or__(self,", "except ValueError: raise ValueError('ITE expects exactly 3 arguments') if a", "simplify=simplify) if func == ITE: a, b, c = args", "| b) & (~c | ~d) >>> to_nnf(Equivalent(a >> b,", "Examples ======== >>> to_cnf(~(a | b) | c) (c |", "Return a logical And function (i.e., the \"product of sums\"", "the statement is True. 
Examples ======== >>> Not(True) false >>>", "in minterms is also in dontcares') maxterms = [] for", "else: obj._args = tuple(ordered(argset)) obj._argset = frozenset(argset) return obj @property", "info[2]) for c in conj.args)))) elif isinstance(info[0], info[1]): return info[1](*list(map(_distribute,", "x >>> ITE(False, x, y) y >>> ITE(x, y, y)", "if _compare_term(x, y): break else: for z in l1: #", "rj)) if odd: argset.remove(true) if true in argset else argset.add(true)", "Some useful methods def conjuncts(expr): \"\"\"Return a list of the", "or both False. Returns True if all of the arguments", "Logical NAND function. It evaluates its arguments in order, giving", "| c), False) True >>> is_nnf(Not(a & b) | c)", "thumb is: \"If the boolean in question can be replaced", "\"\"\" is_Not = True @classmethod def eval(cls, arg): from ..core", "And.make_args(expr) def disjuncts(expr): \"\"\"Return a list of the disjuncts in", "~x >>> Not(And(Or(x, y), Or(~x, ~y))) ~((x | y) &", "Relational objects. if isinstance(arg, Equality): return Unequality(*arg.args) if isinstance(arg, Unequality):", "& b) | c) True >>> is_dnf(a & (b |", "True) true >>> Implies(False, True) true >>> x >> y", "in self.args[1:]]) # end class definitions. Some useful methods def", "sentence s to disjunctive normal form. That is, of the", "Not(c))) False >>> Not(And(a, Not(a))).equals(Or(b, Not(b))) False \"\"\" from ..core.relational", "mutivariate' ' expressions') def to_nnf(self, simplify=True): if is_literal(self): return self", "False >>> Not(And(a, Not(a))).equals(Or(b, Not(b))) False \"\"\" from ..core.relational import", ">>> Not(x > 0, evaluate=False).as_set() (-oo, 0] \"\"\" if len(self.free_symbols)", "for a, b in zip(self.args, self.args[1:]): args.append(Or(~a, b)) args.append(Or(~self.args[-1], self.args[0]))", "simplify): return expr return expr.to_nnf(simplify) def to_cnf(expr, simplify=False): \"\"\" Convert", "the logic module. 
The primary advantage of using false instead", "1, 1]] >>> dontcares = [[0, 0, 0, 0], [0,", "| ~a) & (b | c)) False >>> is_nnf((a |", "appeared, counting Symbol as 1 and Not(Symbol) as 2 ]", "b in remove: argset.remove(a) argset.remove(b) argset.add(True) if len(argset) <= 1:", "symbols. For example, And(x, Not(y), Or(w, Not(z))). Basic.match is not", "Or(~x, ~y))) ~((x | y) & (~x | ~y)) Notes", "simplified_pairs and a redundant group- eliminating algorithm to convert the", "Returns True if an odd number of the arguments are", "sets. Examples ======== >>> Or(x > 2, x < -2).as_set()", "If simplify is True, the result contains no redundant clauses.", "x in enumerate(term): if x not in (3, minterm[i]): return", "True, and False if they are all False. Examples ========", "range(1, len(args)+1, 2): for neg in combinations(args, i): clause =", "Or(Not(A), B) elif A == B: return true elif A.is_Relational", "| (b & c) | (~b & c), True) a", "replaced by an arbitrary symbolic ``Boolean``, like ``Or(x, y)`` or", "0, 0, 1], [0, 0, 1, 1], ... [0, 1,", "func == Equivalent: return And._to_nnf(Or(*args), Or(*[~arg for arg in args]),", "negated symbols. For example, And(x, Not(y), Or(w, Not(z))). 
Basic.match is", "def eliminate_implications(expr): \"\"\" Change >>, <<, and Equivalent into &,", "# S.BooleanFalse, but making the class and instance names the", "| c) (c | ~a) & (c | ~b) >>>", "will work as expected on this class, whereas with True", "False stack.extend(expr.args) elif not is_literal(expr): return False return True def", "x & y Notes ===== The ``&`` operator is provided", "there are inputs that can be ignored, pass them as", "b]) True >>> conjuncts(a | b) == frozenset([Or(a, b)]) True", "& c) | (~b & c), True) a | c", "if A is true else it returns the result of", "truthtable) elif form == 'cnf' or form is None: #", "\"\"\" Checks if a pair of minterms differs by only", "return And._to_nnf(a, ~b, simplify=simplify) if func == Equivalent: return And._to_nnf(Or(*args),", "they evaluate to false. Notes ===== See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`.", "is_nnf(Not(a & b) | c) False >>> is_nnf((a >> b)", "the output is NOT simplified. Examples ======== >>> distribute_or_over_and(And(Or(Not(a), b),", "expr to Negation Normal Form. A logical expression is in", "expression of function2 if isinstance(expr, function2): for lit in expr.args:", "x})) (c & d & (a | b) & (~a", "truth table. For two formulas to be equal they must", "None: # pragma: no branch return POSform(variables, truthtable) else: raise", "== True is True``.\" While \"``true is True``\" is ``False``,", "== 'dnf' or \\ (form is None and len(truthtable) >=", "\"\"\" from ..sets import Intersection if len(self.free_symbols) == 1: return", "y \"\"\" @classmethod def eval(cls, *args): try: a, b, c", "which is bitwise and. Hence, ``And(a, b)`` and ``a &", "the given formulas have the same truth table. For two", "if all of the arguments are logically equivalent. Returns False", "if isinstance(cls, Not): if not cls.args[0].is_Atom: return False elif not", "list(range(len(terms))) for i, ti in enumerate(terms[:-1]): for j_i, tj in", "True or both False. 
Returns True if all of the", "| ~b) >>> to_cnf((a | b) & (a | ~a),", "remove: argset.remove(a) argset.remove(b) argset.add(True) if len(argset) <= 1: return true", "work, rather than S.BooleanTrue and # S.BooleanFalse, but making the", "cj == nc: odd = ~odd break elif cj ==", ">>> is_nnf((a | ~a) & (b | c)) False >>>", "negation if isinstance(expr, Not): if not expr.args[0].is_Atom: return False if", "a in self.args[1:]]) # end class definitions. Some useful methods", "elif m == 0: temp.append(variables[i]) return Or(*temp) def _simplified_pairs(terms): \"\"\"", "return set().union(*(_find_predicates(i) for i in expr.args)) def simplify_logic(expr, form=None, deep=True):", "while new != old: old = new new = _simplified_pairs(old)", "False \"\"\" if isinstance(expr, Not): return not isinstance(expr.args[0], BooleanFunction) else:", "as 1 and Not(Symbol) as 2 ] >>> eq =", "``if greeting:`` * No: ``if greeting == True:`` * Worse:", ">>> Xor(x, y).subs({y: 0}) x \"\"\" def __new__(cls, *args, **kwargs):", "as expected on this class, whereas with False they act", "and returns the result of B if A is true", "in terms: for y in essential: if _compare_term(x, y): break", "def eval(cls, *args): return Not(Or(*args)) class Implies(BooleanFunction): \"\"\" Logical implication.", "False >> False 0 >>> Or(True, False) true See Also", "by the given term. Used for recognizing prime implicants. \"\"\"", "minterm2)): if i != j: if index == -1: index", "~c) True >>> is_nnf((a | ~a) & (b | c))", "an expression is in disjunctive normal form. Examples ======== >>>", "\"\"\"Return the mapping that equates variables between two simplified boolean", "the same literals. Examples ======== >>> (a >> b).equals(~b >>", ">>> Xor(True, False, True, True, False) true >>> Xor(True, False,", "Otherwise, use ``True``\". 
In other words, use ``true`` only on", "the same as ``1 >> 1``, i.e., 0, which has", "y Xor(x, y) Notes ===== The ``^`` operator is provided", "if simplify: return simplify_logic(expr, 'dnf', True) # Don't convert unless", "= BooleanFalse() # We want S.true and S.false to work,", "is True``. To quote PEP 8: Don't compare boolean values", "minterms) return Or(*[_convert_to_varsSOP(x, variables) for x in essential]) def POSform(variables,", "must be ``Basic``. On the other hand, ``==`` is not", "a redundant-group eliminating algorithm to convert the list of all", "BooleanFunction): \"\"\" Logical OR function It evaluates its arguments in", "And(x, True)) true \"\"\" def __new__(cls, *args, **options): from ..core.relational", "0: temp.append(variables[i]) return Or(*temp) def _simplified_pairs(terms): \"\"\" Reduces a set", "def to_nnf(self, simplify=True): if is_literal(self): return self expr = self.args[0]", "rest), info[1], info[2]) for c in conj.args)))) elif isinstance(info[0], info[1]):", "are False, and True if they are all True. Examples", "simplified_terms: simplified_terms.append(newterm) simplified_terms.extend( [terms[i] for i in [_ for _", "CNF. 
Examples ======== >>> distribute_and_over_or(Or(a, And(Not(b), Not(c)))) (a | ~b)", "for k in f1: if k not in f2 or", "1, 0, 1]] >>> POSform([t, x, y, z], minterms, dontcares)", "false identity = true nargs = None @classmethod def _new_args_filter(cls,", "on those contexts where the boolean is being used as", "if isinstance(arg, StrictGreaterThan): return LessThan(*arg.args) if isinstance(arg, LessThan): return StrictGreaterThan(*arg.args)", "Notes ===== There is liable to be some confusion as", "| ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables = [sympify(v)", "POSform(variables, truthtable) else: raise ValueError('form can be cnf or dnf", "that makes the two expressions bool1 and bool2 represent the", "symbols[arg] return [{append_symbol(arg, symbols) for arg in Or.make_args(c)} for c", "propositional logical sentence s to conjunctive normal form. That is,", "in Diofant. Parameters ========== expr : string or boolean expression", "0: return false elif len(argset) == 1: return argset.pop() elif", "f1 = _finger(function1) f2 = _finger(function2) # more quick checks", "Checks if a pair of minterms differs by only one", "x in terms: temporary = [] for y in l1:", "and Not is applied only to literals. If simplified is", "Logical NOR function. It evaluates its arguments in order, giving", "| (b & c) >>> to_dnf((a & b) | (a", "'cnf', True) # Don't convert unless we have to if", "in an And or Or, # of times it appeared", "lit.is_Atom: return False return True # Special case of a", "& (b | c)) False >>> is_nnf((a | ~a) &", "b) | c) False >>> is_nnf((a >> b) & (b", "list(t) if expr.xreplace(dict(zip(variables, t))): truthtable.append(t) if deep: from ..simplify import", "[1, 1, 1, 1]] >>> dontcares = [[0, 0, 0,", "of arguments with which it appeared, counting Symbol as 1", "has been denested and is either an And (or an", "if isinstance(expr, function2): for lit in expr.args: if isinstance(lit, Not):", "a and b do not. 
\"\"\" f = eq.free_symbols d", "boolean object is an object for which logic operations make", "for d in dontcares: if d in minterms: raise ValueError(f'{d}", "C) evaluates and returns the result of B if A", "\"\"\"Overloading for ~ operator.\"\"\" return Not(self) def __rshift__(self, other): \"\"\"Overloading", "== other.atoms() and \\ not satisfiable(Not(Equivalent(self, other))) class BooleanAtom(Atom, Boolean):", "t), And(Not(y), z), And(x, y)) >>> eq2 = Or(And(Not(c), a),", "y Notes ===== The ``|`` operator is provided as a", "which logic operations make sense.\"\"\" def __and__(self, other): \"\"\"Overloading for", "odd = ~odd break elif cj == c: break else:", "y Notes ===== The ``&`` operator is provided as a", "in the expr s. Examples ======== >>> conjuncts(a & b)", "simplified: args = expr.args for arg in args: if Not(arg)", "return [true] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, Or) def as_set(self): \"\"\"", "the answer is returned according to the form with fewest", "func == ITE: a, b, c = args return And._to_nnf(Or(a,", ">>> disjuncts(a | b) == frozenset([a, b]) True >>> disjuncts(a", "simplify=False): \"\"\" Convert a propositional logical sentence s to disjunctive", "applied only to literals. If simplify is True, the result", "hash(False) def as_set(self): \"\"\" Rewrite logic operators and relationals in", "different things if ``a`` and ``b`` are integers. In particular,", "Examples ======== >>> is_dnf(a | b | c) True >>>", "expr = sympify(expr) if is_literal(expr): return True stack = [expr]", "in dontcares') maxterms = [] for t in product([0, 1],", "== false: return a elif b == false and c", "just use ``if x`` instead of ``if x is True``.", "simplify=simplify) class ITE(BooleanFunction): \"\"\" If then else clause. ITE(A, B,", "first argument. 
Return a logical Or function (i.e., the \"sum", "In particular, ``a ^ b`` and ``Xor(a, b)`` will be", "The primary advantage of using true instead of True is", "__lshift__(self, other): \"\"\"Overloading for << operator.\"\"\" return Implies(other, self) __rrshift__", "and the mapping of variables that makes the two expressions", "whereas ``true`` and ``false`` represent a two-valued logic. When in", "not. In particular, ``~a`` and ``Not(a)`` will be different if", "of the arguments are False. Returns False if all arguments", "z & (y | ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\"", "[true] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, Or) def as_set(self): \"\"\" Rewrite", "z in l1: # pragma: no branch if _compare_term(x, z):", "the first argument. Return a logical Or function (i.e., the", "= true identity = false @classmethod def _new_args_filter(cls, args): newargs", "_ is not None]]) return simplified_terms def _compare_term(minterm, term): \"\"\"", "Examples ======== >>> Nand(False, True) true >>> Nand(True, True) false", "b)) (a | ~b) & (b | ~a) >>> eliminate_implications(Equivalent(a,", "and ``b`` are integers. >>> Or(x, y).subs({x: 0}) y \"\"\"", "an odd number of the arguments are True and the", "index != -1: todo[i] = todo[j_i + i + 1]", "== nc: return false elif cj == c: remove.append((r, rj))", "... [1, 0, 1, 1], [1, 1, 1, 1]] >>>", "for lit in cls.args: if isinstance(lit, Not): if not lit.args[0].is_Atom:", "is True``.\" While \"``true is True``\" is ``False``, \"``true ==", "(i.e., the \"sum of products\" or \"SOP\" form) that gives", "1, 1], ... [1, 0, 1, 1], [1, 1, 1,", "use the Diofant objects ``true`` and ``false``. >>> True >>", "= rel[j][:2] if cj == nc: return false elif cj", "(dontcares or [])] for d in dontcares: if d in", "False. 
Returns True if all of the arguments are logically", "continue else: arg = true if isinstance(arg, Xor): for a", "def distribute_or_over_and(expr): \"\"\" Given a sentence s consisting of conjunctions", "Notes ===== See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples ======== >>> sympify(False)", "True >> False 1 >>> true >> false false \"\"\"", "= _rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x, variables) for x in essential])", "is False. Returns True otherwise. Examples ======== >>> Implies(True, False)", "has a boolean value of True. To avoid this issue,", "sets. Examples ======== >>> Not(x > 0, evaluate=False).as_set() (-oo, 0]", "are only symbols or negated symbols. For example, And(x, Not(y),", "new = maxterms + dontcares while new != old: old", "todo[j_i + i + 1] = None newterm = ti[:]", "c: return b elif b == true and c ==", "Examples ======== >>> x | y x | y Notes", "= new new = _simplified_pairs(old) essential = _rem_redundancy(new, maxterms) return", "in question can be replaced by an arbitrary symbolic ``Boolean``,", "``~1`` which is ``-2``, which has a boolean value of", "(~x & ~y & ~z) >>> simplify_logic(_) ~x & ~y", "not satisfying the above rule of thumb, the assumptions system", "== frozenset([a, b]) True >>> disjuncts(a & b) == frozenset([And(a,", "x in essential]) def _find_predicates(expr): \"\"\"Helper to find logical predicates", "of False, a singleton that can be accessed via ``false``.", "d, d: x}) \"\"\" def match(function1, function2): \"\"\"Return the mapping", "| ~c) \"\"\" return _distribute((expr, And, Or)) def distribute_or_over_and(expr): \"\"\"", "else False. 
Examples ======== >>> is_literal(a) True >>> is_literal(~a) True", "**kwargs) for arg in super(Xor, obj).args: if isinstance(arg, Number) or", ">>> Nand(True, True) false >>> Nand(x, y) ~(x & y)", "ai.is_Symbol: d[ai][2] += 1 d[ai][-1] += o else: d[ai.args[0]][3] +=", "d)`` or ``(d, a)``: >>> eq1 = Or(And(Not(y), t), And(Not(y),", "a boolean value of True. To avoid this issue, use", "``true`` instead of ``True``, as elements of ``.args`` must be", "converter[bool] = lambda x: true if x else false class", "use here is different from its normal use in Python,", "space. This is used as base class for And, Or,", "a.args: if ai.is_Symbol: d[ai][2] += 1 d[ai][-1] += o else:", "normal use in Python, which is bitwise xor. In particular,", "return -symbols[arg.args[0]] else: return symbols[arg] return [{append_symbol(arg, symbols) for arg", ">>> to_cnf((a | b) & (a | ~a), True) a", "also in dontcares') maxterms = [] for t in product([0,", "Not(a))).equals(Or(b, Not(b))) False \"\"\" from ..core.relational import Relational from .inference", "Functions in the logic module will return this class when", "index = _check_pair(ti, tj) if index != -1: todo[i] =", "= [] for t in product([0, 1], repeat=len(variables)): t =", "args.append(Or(~self.args[-1], self.args[0])) return And._to_nnf(*args, simplify=simplify) class ITE(BooleanFunction): \"\"\" If then", "~(x & y) \"\"\" @classmethod def eval(cls, *args): return Not(And(*args))", "for _ in todo if _ is not None]]) return", "(~a | ~b), {a: a, b: b, c: d, d:", "Unequality): return Equality(*arg.args) if isinstance(arg, StrictLessThan): return GreaterThan(*arg.args) if isinstance(arg,", "methods def conjuncts(expr): \"\"\"Return a list of the conjuncts in", "f2 = _finger(function2) # more quick checks if len(f1) !=", "rest are False. 
Returns False if an even number of", "multivariate' ' expressions') class Or(LatticeOp, BooleanFunction): \"\"\" Logical OR function", "b) for the mapping {x: a, y:b} or {x: b,", "satisfiable(Not(Equivalent(self, other))) class BooleanAtom(Atom, Boolean): \"\"\"Base class of BooleanTrue and", "1): newargs.append(True if x else False) else: newargs.append(x) A, B", "for x, (i, j) in enumerate(zip(minterm1, minterm2)): if i !=", "ratio, measure): return simplify_logic(self) def to_nnf(self, simplify=True): return self._to_nnf(*self.args, simplify=simplify)", "of times it appeared as a Not(symbol), # of times", ">>> False >> False 0 >>> Or(True, False) true See", "& (a | ~a), True) a | b \"\"\" expr", "function2): for lit in expr.args: if isinstance(lit, Not): if not", "version of True, a singleton that can be accessed via", "GreaterThan): return StrictLessThan(*arg.args) def as_set(self): \"\"\" Rewrite logic operators and", "expr.xreplace(dict(zip(variables, t))): truthtable.append(t) if deep: from ..simplify import simplify variables", "True)) true >>> ITE(x, y, z) ITE(x, y, z) >>>", "b), c, And(c, d)) >>> bool_map(eq, eq.subs({c: x})) (c &", "set() for arg in args: if not is_literal(arg): arg =", "no branch if _compare_term(x, z): assert z not in essential", "simplified version of bool1, and the mapping of variables that", "def equals(self, other, failing_expression=False): \"\"\" Returns True if the given", "to if is_cnf(expr): return expr expr = eliminate_implications(expr) return distribute_and_over_or(expr)", "convenience, but note that its use here is different from", "1]] >>> POSform([t, x, y, z], minterms, dontcares) z &", "& y) \"\"\" @classmethod def eval(cls, *args): return Not(And(*args)) class", "an Atom if expr.is_Atom: return True # Special case of", "POSform([t, x, y, z], minterms, dontcares) z & (y |", "Or and Not, and Not is applied only to literals.", "\"\"\" Checks if expr is in Negation Normal Form. 
A", "import LatticeOp from ..core.singleton import S from ..core.singleton import SingletonWithManagedProperties", "If simplify is True, the expr is evaluated to its", "== true or B == false: return Or(Not(A), B) elif", "args.append(Or(~a, b)) args.append(Or(~self.args[-1], self.args[0])) return And._to_nnf(*args, simplify=simplify) class ITE(BooleanFunction): \"\"\"", "== 0: temp.append(variables[i]) return Or(*temp) def _simplified_pairs(terms): \"\"\" Reduces a", "rule of thumb, the assumptions system uses a three-valued logic", "Not(And(True, False)) true >>> Not(Or(True, False)) false >>> Not(And(And(True, x),", "The ``~`` operator is provided as a convenience, but note", "return not isinstance(expr.args[0], BooleanFunction) else: return not isinstance(expr, BooleanFunction) def", "False 0 >>> Or(True, False) true See Also ======== BooleanTrue", "\"\"\" Test whether or not an expression is in conjunctive", "c: remove.append((r, rj)) break for a, b in remove: argset.remove(a)", "Xor(BooleanFunction): \"\"\" Logical XOR (exclusive OR) function. Returns True if", "arg in args: if not is_literal(arg): arg = arg.to_nnf(simplify) if", "b`` will return different things if ``a`` and ``b`` are", "bitwise on 1. Functions in the logic module will return", "expr if simplify: return simplify_logic(expr, 'dnf', True) # Don't convert", "stack.pop() if expr.func in (And, Or): if simplified: args =", "A == B: return true elif A.is_Relational and B.is_Relational: if", "def _distribute(info): \"\"\"Distributes info[1] over info[2] with respect to info[0].\"\"\"", "arg break else: return info[0] rest = info[2](*[a for a", "XOR (exclusive OR) function. Returns True if an odd number", "in an And or Or, sum of the number of", "' expressions') class Not(BooleanFunction): \"\"\" Logical Not function (negation). 
Returns", "has not yet been' ' implemented for mutivariate' ' expressions')", "else: return -1 return index def _convert_to_varsSOP(minterm, variables): \"\"\" Converts", "POSform(variables, minterms, dontcares=None): \"\"\" The POSform function uses simplified_pairs and", "not an expression is in conjunctive normal form. Examples ========", "``None``), whereas ``true`` and ``false`` represent a two-valued logic. When", ">>> Not(And(Or(x, y), Or(~x, ~y))) ~((x | y) & (~x", "\"\"\" Logical Not function (negation). Returns True if the statement", "an And or Or, # of times it appeared as", "if not is_literal(arg): arg = arg.to_nnf(simplify) if simplify: if isinstance(arg,", "expected on this class, whereas with False they act bitwise", "boolean flags, it's better to just use ``if x`` instead", "1, 0], [0, 1, 0, 1]] >>> SOPform([t, x, y,", "been' ' implemented for multivariate' ' expressions') class Or(LatticeOp, BooleanFunction):", "1, 1], [1, 1, 1, 1]] >>> dontcares = [[0,", "unless we have to if is_dnf(expr): return expr expr =", "essential.append(temporary[0]) for x in terms: for y in essential: if", "the two expressions bool1 and bool2 represent the same logical", "false: return Or(Not(A), B) elif A == B: return true", "super().__new__(cls, _args) obj._argset = _args return obj @property # type:", "Not(c)))) (a | ~b) & (a | ~c) \"\"\" return", "words, use ``true`` only on those contexts where the boolean", "puts them into an integer representation. 
Examples ======== >>> to_int_repr([x", "old = new new = _simplified_pairs(old) essential = _rem_redundancy(new, maxterms)", "(a >> b).equals(~b >> ~a) True >>> Not(And(a, b, c)).equals(And(Not(a),", "b) True >>> is_literal(Or(a, b)) False \"\"\" if isinstance(expr, Not):", "z: d}) >>> eq = And(Xor(a, b), c, And(c, d))", "-2).as_set() [-oo, -2) U (2, oo] \"\"\" from ..sets import", "over whether a function or expression will return ``true`` or", "to recursively simplify any non-boolean functions contained within the input.", "if isinstance(expr, Not): return not isinstance(expr.args[0], BooleanFunction) else: return not", "Relational from .inference import satisfiable other = sympify(other) if self.has(Relational)", "d[a.args[0]][1] += 1 else: o = len(a.args) + sum(isinstance(ai, Not)", "CNF by default). deep : boolean (default True) indicates whether", "minterms is also in dontcares') maxterms = [] for t", "clause. ITE(A, B, C) evaluates and returns the result of", "evaluate to true. Notes ===== There is liable to be", "b | ~c) True >>> is_nnf((a | ~a) & (b", "Implies(False, True) true >>> x >> y Implies(x, y) >>>", "truth value of False. To avoid this issue, use the", "a redundant group- eliminating algorithm to convert the list of", "And(True, True), Xor(True, True)) true >>> ITE(x, y, z) ITE(x,", "and ``b`` are integers. >>> And(x, y).subs({x: 1}) y \"\"\"", "Not(And(a, b, c)).equals(And(Not(a), Not(b), Not(c))) False >>> Not(And(a, Not(a))).equals(Or(b, Not(b)))", ">>> Xor(True, False) true >>> Xor(True, True) false >>> Xor(True,", "c) (c | ~a) & (c | ~b) >>> to_cnf((a", "of ``is`` to do the comparison, and it will work", "sympify(b) (z & ~x & ~y) | (~x & ~y", "simplify_logic(b) ~x & ~y >>> sympify(b) (z & ~x &", "__rshift__ def __xor__(self, other): return Xor(self, other) __rxor__ = __xor__", "& ~B & ...) 
| (B & C & ...)", "Relational args = [sympify(arg, strict=True) for arg in args] argset", "that for the most part, you can just use ``True``", "To quote PEP 8: Don't compare boolean values to ``True``", "required form.\"\"\" expr = sympify(expr) # Special case of an", "return a elif b == false and c == true:", "frozenset([And(a, b)]) True \"\"\" return Or.make_args(expr) def distribute_and_over_or(expr): \"\"\" Given", "quick checks if function1.__class__ != function2.__class__: return if len(function1.args) !=", "return StrictLessThan(*arg.args) def as_set(self): \"\"\" Rewrite logic operators and relationals", "false >>> Nand(x, y) ~(x & y) \"\"\" @classmethod def", "make sense.\"\"\" def __and__(self, other): \"\"\"Overloading for & operator.\"\"\" return", "arg in args], simplify=simplify) if func == Implies: a, b", "all True. Returns True if any of the arguments are", "ti[:] newterm[index] = 3 if newterm not in simplified_terms: simplified_terms.append(newterm)", "a BooleanFunction that is not a BooleanFunction itself. \"\"\" if", "For example, if the object ends up in the ``.args``", "be cnf or dnf only') def _finger(eq): \"\"\" Assign a", "or \"SOP\" form) that gives the desired outcome. If there", "and is either an And (or an Or) whose arguments", "``false``. 
>>> ~True -2 >>> ~true false \"\"\" is_Not =", "to recognize and eliminate redundant pairs, and return the essential", "match dictionary if possible matchdict = {} for k in", "& ~a)) \"\"\" expr = sympify(expr) if is_nnf(expr, simplify): return", "= _simplified_pairs(old) essential = _rem_redundancy(new, minterms) return Or(*[_convert_to_varsSOP(x, variables) for", "since it always returns ``True`` or ``False``, and does so", "return self def __int__(self): return int(bool(self)) class BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant", "single expression of function2 if isinstance(expr, function2): for lit in", "return [{append_symbol(arg, symbols) for arg in Or.make_args(c)} for c in", "x, y) y >>> ITE(x, y, y) y \"\"\" @classmethod", "======== >>> false.as_set() EmptySet() \"\"\" from ..sets import EmptySet return", "directly from this # file). S.true = true S.false =", "_check_pair(ti, tj) if index != -1: todo[i] = todo[j_i +", "be accessed via ``true``. This is the Diofant version of", "BooleanFunction): \"\"\" Logical AND function. It evaluates its arguments in", "LatticeOp._new_args_filter(newargs, And) def as_set(self): \"\"\" Rewrite logic operators and relationals", "distribute_and_over_or(expr) def to_dnf(expr, simplify=False): \"\"\" Convert a propositional logical sentence", "+ 1))) def append_symbol(arg, symbols): if isinstance(arg, Not): return -symbols[arg.args[0]]", "variables] if minterms == []: return false minterms = [list(i)", "2}, {2}] \"\"\" symbols = dict(zip(symbols, range(1, len(symbols) + 1)))", "avoid this issue, use the Diofant objects ``true`` and ``false``.", "the mapping that equates variables between two simplified boolean expressions", ">>> is_cnf((a & b) | c) False \"\"\" return _is_form(expr,", "= [expr] while stack: expr = stack.pop() if expr.func in", "into &, |, and ~. That is, return an expression", "False, and False if they are all True. Returns True", "is in disjunctive normal form. 
Examples ======== >>> is_dnf(a |", "three-valued logic (``True``, ``False``, ``None``), whereas ``true`` and ``false`` represent", "| c) True >>> is_cnf(a & b & c) True", "the number of arguments with which it appeared, counting Symbol", "to the form with fewest args (in CNF by default).", "[~s if s in neg else s for s in", "of times it appeared as a Symbol in an And", "in argset: argset.remove(True) return Not(Xor(*argset)) else: obj._args = tuple(ordered(argset)) obj._argset", "not isinstance(expr.args[0], BooleanFunction) else: return not isinstance(expr, BooleanFunction) def to_int_repr(clauses,", "is a function that lives in a boolean space. This", "BooleanTrue() false: BooleanFalse = BooleanFalse() # We want S.true and", "Special case of an Atom if expr.is_Atom: return True #", "S.false to work, rather than S.BooleanTrue and # S.BooleanFalse, but", "= match(a, b) if m: return a, m return m", "same as ``1 >> 1``, i.e., 0, which has a", "len(f2): return # assemble the match dictionary if possible matchdict", "in enumerate(term): if x not in (3, minterm[i]): return False", "b)`` will be different if ``a`` and ``b`` are integers.", "has a truth value of False. To avoid this issue,", "simplify=simplify) raise ValueError(f'Illegal operator {func} in expression') class Xor(BooleanFunction): \"\"\"", "all False. Returns False if any argument is True. Returns", "for a in eq.args: if a.is_Symbol: d[a][0] += 1 elif", ">>> true.as_set() UniversalSet() \"\"\" return S.UniversalSet class BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant", "A, B = newargs except ValueError: raise ValueError(f'{len(args)} operand(s) used", "info[2]) for x in info[0].args)))) else: return info[0] def to_nnf(expr,", "into the smallest Product of Sums form. The variables must", "literals. 
If simplified is True, checks if result contains no", "or arg in (True, False): return false if arg else", "not in essential essential.append(z) break return essential def SOPform(variables, minterms,", "for v in variables] if minterms == []: return false", "in product([0, 1], repeat=len(variables)): t = list(t) if expr.xreplace(dict(zip(variables, t))):", "greeting == True:`` * Worse: ``if greeting is True:`` Examples", "True:`` Examples ======== >>> sympify(True) true >>> ~true false >>>", "identity = true nargs = None @classmethod def _new_args_filter(cls, args):", "*args, **kwargs): simplify = kwargs.get('simplify', True) argset = set() for", "the inability to import the class directly from this #", "and B.is_Relational: if A.canonical == B.canonical: return true elif (~A).canonical", "and ~. That is, return an expression that is equivalent", "in range(1, len(args)+1, 2): for neg in combinations(args, i): clause", "variables = [sympify(v) for v in variables] if minterms ==", "in a.args: if ai.is_Symbol: d[ai][2] += 1 d[ai][-1] += o", "LatticeOp._new_args_filter(newargs, Or) def as_set(self): \"\"\" Rewrite logic operators and relationals", "[x, y]) [{1, 2}, {2}] \"\"\" symbols = dict(zip(symbols, range(1,", "False def __hash__(self): return hash(False) def as_set(self): \"\"\" Rewrite logic", "``false``. >>> True >> False 1 >>> true >> false", "temp = [] for i, m in enumerate(minterm): if m", "are all False. Examples ======== >>> x | y x", "of minterms differs by only one bit. If yes, returns", "sentence s consisting of conjunctions and disjunctions of literals, return", "greeting is True:`` Examples ======== >>> sympify(True) true >>> ~true", "ITE: a, b, c = args return And._to_nnf(Or(a, ~c), Or(~a,", "& y) | (t & ~y) | (z & ~y),", "since Python considers ``True`` and ``False`` to be integers, ``True", "B) is True iff A and B are both True", "is True, the expr is evaluated to its simplest DNF", "a singleton that can be accessed via ``true``. 
This is", "the most part, you can just use ``True`` and it", "issue sympy/sympy#4835) so this is a workaround that is valid", "Returns True if expr is a literal, else False. Examples", "from ..core.function import Application from ..core.numbers import Number from ..core.operations", "= [] for i in range(1, len(args)+1, 2): for neg", "x else false class BooleanFunction(Application, Boolean): \"\"\"Boolean function is a", "== 1: temp.append(Not(variables[i])) elif m == 0: temp.append(variables[i]) return Or(*temp)", "dontcares=None): \"\"\" The POSform function uses simplified_pairs and a redundant-group", "return different things if ``a`` and ``b`` are integers. In", ">>> conjuncts(a | b) == frozenset([Or(a, b)]) True \"\"\" return", "& (~c | ~d) >>> to_nnf(Equivalent(a >> b, b >>", "m == 1: temp.append(Not(variables[i])) elif m == 0: temp.append(variables[i]) return", "greeting:`` * No: ``if greeting == True:`` * Worse: ``if", "whether to recursively simplify any non-boolean functions contained within the", "and it will automatically be converted to ``true`` when necessary,", "super(Xor, obj).args: if isinstance(arg, Number) or arg in (True, False):", "z): assert z not in essential essential.append(z) break return essential", "~y)) Notes ===== The ``~`` operator is provided as a", "'dnf' or form is None: expr = sympify(expr) if not", "predicates in BooleanFunctions. A logical predicate is defined here as", "~z) | (~x & ~y & z) >>> simplify_logic(b) ~x", "== B.canonical: return true elif (~A).canonical == B.canonical: return B", "(c | ~b) >>> to_cnf((a | b) & (a |", "to Negation Normal Form. A logical expression is in Negation", "info[1] over info[2] with respect to info[0].\"\"\" if isinstance(info[0], info[2]):", "t in product([0, 1], repeat=len(variables)): t = list(t) if (t", "the expansion of a function from binary to it's variable", "{x: b, y:a}. 
If no such mapping exists, return False.", "info[0].args: if isinstance(arg, info[1]): conj = arg break else: return", "else: raise ValueError('form can be cnf or dnf only') def", "the conjuncts in the expr s. Examples ======== >>> conjuncts(a", "in Python, which is bitwise and. Hence, ``And(a, b)`` and", "= _check_pair(ti, tj) if index != -1: todo[i] = todo[j_i", "false >>> Nor(False, True) false >>> Nor(False, False) true >>>", "or negated symbols. For example, And(x, Not(y), Or(w, Not(z))). Basic.match", "to And(a, b) for the mapping {x: a, y:b} or", "'cnf' or form == 'dnf' or form is None: expr", "except ValueError: raise ValueError(f'{len(args)} operand(s) used for an Implies '", "[0, 1, 1, 1], ... [1, 0, 1, 1], [1,", "BooleanTrue \"\"\" def __bool__(self): return False def __hash__(self): return hash(False)", "Logical implication. A implies B is equivalent to !A v", "return LatticeOp._new_args_filter(newargs, And) def as_set(self): \"\"\" Rewrite logic operators and", "Hence, ``Implies(a, b)`` and ``a >> b`` will return different", "(b & ~a)) \"\"\" expr = sympify(expr) if is_nnf(expr, simplify):", "\"\"\" return S.UniversalSet class BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of False,", "by default). deep : boolean (default True) indicates whether to", "# do some quick checks if function1.__class__ != function2.__class__: return", "c) True >>> is_dnf(a & b & c) True >>>", "\"POS\" form) that gives the desired outcome. If there are", "can be accessed via ``false``. This is the Diofant version", "True, checks if result contains no redundant clauses. Examples ========", "in enumerate(terms[(i + 1):]): index = _check_pair(ti, tj) if index", "number of the arguments are True and the rest are", "logical sentence s to disjunctive normal form. 
That is, of", "z, y], [[1, 0, 1], [0, 0, 1]]) >>> function2", "return simplified_terms def _compare_term(minterm, term): \"\"\" Return True if a", "from ..core.relational import Relational args = [sympify(arg, strict=True) for arg", "disjuncts in the sentence s. Examples ======== >>> disjuncts(a |", "representation. Examples ======== >>> to_int_repr([x | y, y], [x, y])", "Python subclass from ``int``, ``~True`` is the same as ``~1``", "and BooleanFalse.\"\"\" is_Boolean = True @property def canonical(self): return self", "(~x | ~y)) Notes ===== The ``~`` operator is provided", "to_nnf(self, simplify=True): args = [] for i in range(0, len(self.args)+1,", "recursively simplify any non-boolean functions contained within the input. Examples", "elif form == 'cnf' or form is None: # pragma:", "1], repeat=len(variables)): t = list(t) if expr.xreplace(dict(zip(variables, t))): truthtable.append(t) if", "class definitions. Some useful methods def conjuncts(expr): \"\"\"Return a list", "true: return b elif a == false: return c elif", "if it contains only And, Or and Not, and Not", "``==`` is not a symbolic operation in Diofant, since it", "Or(And(Not(y), a), And(Not(y), b), And(x, y)) >>> dict(_finger(eq)) {(0, 0,", "True``. To quote PEP 8: Don't compare boolean values to", "if odd: argset.remove(true) if true in argset else argset.add(true) for", "inability to import the class directly from this # file).", ">>> x ^ y Xor(x, y) Notes ===== The ``^``", "Form (NNF) if it contains only And, Or and Not,", "= 3 if newterm not in simplified_terms: simplified_terms.append(newterm) simplified_terms.extend( [terms[i]", "its simplest CNF form. Examples ======== >>> to_cnf(~(a | b)", "combinations(self.args, i): clause = [~s if s in neg else", ">>> Xor(True, True) false >>> Xor(True, False, True, True, False)", "1 >>> true >> false false \"\"\" @classmethod def eval(cls,", "a propositional logical sentence s to disjunctive normal form. 
That", "in (3, minterm[i]): return False return True def _rem_redundancy(l1, terms):", "of thumb is: \"If the boolean in question can be", "relationals in terms of real sets. Examples ======== >>> false.as_set()", "return cls.zero argset.add(a) else: argset.add(arg) return cls(*argset) class And(LatticeOp, BooleanFunction):", "if func == Equivalent: return And._to_nnf(Or(*args), Or(*[~arg for arg in", "returns the result of B if A is true else", "We want S.true and S.false to work, rather than S.BooleanTrue", "issue, use the Diofant objects ``true`` and ``false``. >>> True", "Finally, for boolean flags, it's better to just use ``if", "| b) == frozenset([Or(a, b)]) True \"\"\" return And.make_args(expr) def", "isinstance(info[0], info[1]): return info[1](*list(map(_distribute, ((x, info[1], info[2]) for x in", "this class when they evaluate to true. Notes ===== There", "| b) & (a | ~a), True) a | b", "& b) == frozenset([a, b]) True >>> conjuncts(a | b)", "other): \"\"\"Overloading for >> operator.\"\"\" return Implies(self, other) def __lshift__(self,", "of each. If more than one mappings of this sort", "[] for a, b in zip(self.args, self.args[1:]): args.append(Or(~a, b)) args.append(Or(~self.args[-1],", "``true``. This is the Diofant version of True, for use", "simplify=simplify) if func == Or: return And._to_nnf(*[~arg for arg in", "s to conjunctive normal form. That is, of the form", "as base class for And, Or, Not, etc. \"\"\" is_Boolean", "return S.UniversalSet class BooleanFalse(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of False, a", "particular, ``~a`` and ``Not(a)`` will be different if ``a`` is", "*[a.diff(x) for a in self.args[1:]]) # end class definitions. Some", "^ y Xor(x, y) Notes ===== The ``^`` operator is", "_compare_term(minterm, term): \"\"\" Return True if a binary term is", "as ``1 >> 1``, i.e., 0, which has a truth", "two-valued logic. When in doubt, use ``True``. 
\"``true == True", "& b) | (b & c) >>> to_dnf((a & b)", "or POS form. The return type is an Or or", "== c: break else: continue remove.append((r, rj)) if odd: argset.remove(true)", "def to_nnf(self, simplify=True): a, b, c = self.args return And._to_nnf(Or(~a,", "elif a == false: return c elif b == c:", "in the logic module. The primary advantage of using true", "else: argset.add(arg) return cls(*argset) class And(LatticeOp, BooleanFunction): \"\"\" Logical AND", "is_literal(~a) True >>> is_literal(a + b) True >>> is_literal(Or(a, b))", "0], [0, 0, 1, 0], [0, 1, 0, 1]] >>>", "or arg in (True, False): if not arg: continue else:", "system should use ``True`` and ``False``. Aside from not satisfying", "return false elif len(argset) == 1: return argset.pop() elif True", "c: break else: continue remove.append((r, rj)) if odd: argset.remove(true) if", "= kwargs.get('simplify', True) argset = set() for arg in args:", "doubt over whether a function or expression will return ``true``", "True and the rest are False. Returns False if an", "and relationals in terms of real sets. Examples ======== >>>", "NotImplementedError('Sorry, Or.as_set has not yet been' ' implemented for multivariate'", "that is equivalent to s, but has only &, |,", "sympify(expr) if is_nnf(expr, simplify): return expr return expr.to_nnf(simplify) def to_cnf(expr,", "import the class directly from this # file). S.true =", "tuple(ordered(argset)) obj._argset = frozenset(argset) return obj @property # type: ignore[misc]", "as a convenience, but note that their use here is", "convenience, but note that their use here is different from", "be one of the (perhaps many) functions that satisfy the", "must be given as the first argument. Return a logical", "== frozenset([a, b]) True >>> conjuncts(a | b) == frozenset([Or(a,", "is number of complimentary pairs odd? start 0 -> False", "arguments are False. 
Examples ======== >>> Nor(True, False) false >>>", "symbols (x), negated symbols (Not(x)), or Or (or an And)", "False 1 >>> true >> false false \"\"\" @classmethod def", "And(x, y)) >>> dict(_finger(eq)) {(0, 0, 1, 0, 2): [x],", "``if greeting == True:`` * Worse: ``if greeting is True:``", "& c), True) a | c \"\"\" expr = sympify(expr)", "0, 1]]) >>> function2 = SOPform([a, b, c], [[1, 0,", "\"\"\" @classmethod def eval(cls, *args): try: newargs = [] for", "eq.free_symbols d = {fi: [0] * 5 for fi in", "return {expr} return set().union(*(_find_predicates(i) for i in expr.args)) def simplify_logic(expr,", "== 1: return self.args[0].as_set().complement(S.Reals) else: raise NotImplementedError('Sorry, Not.as_set has not", "Nor(False, False) true >>> Nor(x, y) ~(x | y) \"\"\"", "pairs odd? start 0 -> False remove = [] for", "the match dictionary if possible matchdict = {} for k", "& (~a | ~b), {a: a, b: b, c: d,", "return Or(*temp) def _simplified_pairs(terms): \"\"\" Reduces a set of minterms,", "Or(a, c), simplify=simplify) def _eval_derivative(self, x): return self.func(self.args[0], *[a.diff(x) for", "is_dnf(a | b | c) True >>> is_dnf(a & b", "false if arg else true if arg.is_Not: return arg.args[0] #", "return ``True``. The assumptions system should use ``True`` and ``False``.", "~True -2 >>> ~true false \"\"\" is_Not = True @classmethod", "StrictGreaterThan, StrictLessThan, Unequality) if isinstance(arg, Number) or arg in (True,", "will be one of the (perhaps many) functions that satisfy", "Hence, ``And(a, b)`` and ``a & b`` will return different", "len(rel)): rj, cj = rel[j][:2] if cj == nc: return", "any of them are False, and False if they are", "should return ``True``. The assumptions system should use ``True`` and", "boolean is being used as a symbolic representation of truth.", "both True or both False. 
Returns True if all of", "-1: todo[i] = todo[j_i + i + 1] = None", "if is_literal(self): return self expr = self.args[0] func, args =", "for use in the logic module. The primary advantage of", "(b | ~a | (b & ~a)) \"\"\" expr =", "return distribute_or_over_and(expr) def is_nnf(expr, simplified=True): \"\"\" Checks if expr is", "an Or) whose arguments are either symbols (x), negated symbols", "+= o inv = defaultdict(list) for k, v in ordered(d.items()):", "import Intersection if len(self.free_symbols) == 1: return Intersection(*[arg.as_set() for arg", "continue nc = (~c).canonical if any(r == nc for r", "is applied only to literals. If simplify is True, the", "__lshift__ __rlshift__ = __rshift__ def __xor__(self, other): return Xor(self, other)", "in args: if isinstance(x, Number) or x in (0, 1):", "c == false: return a elif b == false and", "c \"\"\" expr = sympify(expr) if not isinstance(expr, BooleanFunction): return", "for a in arg.args: argset.remove(a) if a in argset else", "argset.add(True if x else False) rel = [] for r", "!= len(function2.args): return if function1.is_Symbol: return {function1: function2} # get", "to its simplest CNF form. Examples ======== >>> to_cnf(~(a |", "rel[j][:2] if cj == nc: odd = ~odd break elif", "Not, and Not is applied only to literals. If simplified", "on 1. Functions in the logic module will return this", "of ``.args`` must be ``Basic``. On the other hand, ``==``", "its simplified version in SOP or POS form. The return", "of truth. For example, if the object ends up in", "real sets. Examples ======== >>> Not(x > 0, evaluate=False).as_set() (-oo,", "SOP). \"\"\" temp = [] for i, m in enumerate(minterm):", "Not(Symbol) in an And or Or, sum of the number", "normal form is returned; if None, the answer is returned", "True if all arguments are False. Examples ======== >>> Nor(True,", "or ``x > 1``, use ``true``. Otherwise, use ``True``\". 
In", "quote PEP 8: Don't compare boolean values to ``True`` or", "in essential]) def _find_predicates(expr): \"\"\"Helper to find logical predicates in", "``True``. The assumptions system should use ``True`` and ``False``. Aside", "1: return Union(*[arg.as_set() for arg in self.args]) else: raise NotImplementedError('Sorry,", "sympify(True) true >>> ~true false >>> ~True -2 >>> Or(True,", "d))) (a | b) & (~c | ~d) >>> to_nnf(Equivalent(a", "-1 for x, (i, j) in enumerate(zip(minterm1, minterm2)): if i", "argset.add(true) for a, b in remove: argset.remove(a) argset.remove(b) if len(argset)", "else: newargs.append(x) A, B = newargs except ValueError: raise ValueError(f'{len(args)}", "logic module will return this class when they evaluate to", "m in enumerate(minterm): if m == 0: temp.append(Not(variables[i])) elif m", "cj = rel[j][:2] if cj == nc: return false elif", "if is_dnf(expr): return expr expr = eliminate_implications(expr) return distribute_or_over_and(expr) def", "of real sets. Examples ======== >>> Or(x > 2, x", "| ~d) >>> to_nnf(Equivalent(a >> b, b >> a)) (a", "[])] for d in dontcares: if d in minterms: raise", "continue if x.is_Relational: c = x.canonical if c in rel:", "{t: a, x: b, y: c, z: d}) >>> eq", "j_i, tj in enumerate(terms[(i + 1):]): index = _check_pair(ti, tj)", "of False, for use in the logic module. The primary", "\"\"\" Boolean algebra module for Diofant. \"\"\" from collections import", "======== >>> b = (~x & ~y & ~z) |", "elif m == 1: temp.append(variables[i]) return And(*temp) def _convert_to_varsPOS(maxterm, variables):", "in essential: essential.append(temporary[0]) for x in terms: for y in", "must have the same literals. 
Examples ======== >>> (a >>", "== 1: return Union(*[arg.as_set() for arg in self.args]) else: raise", "try: a, b, c = args except ValueError: raise ValueError('ITE", "been' ' implemented for multivariate' ' expressions') class Not(BooleanFunction): \"\"\"", "of conjunctions and disjunctions of literals, return an equivalent sentence", "If there are inputs that can be ignored, pass them", "arg in Or.make_args(c)} for c in clauses] def _check_pair(minterm1, minterm2):", "else: o = len(a.args) + sum(isinstance(ai, Not) for ai in", ">> false false \"\"\" @classmethod def eval(cls, *args): try: newargs", "raise ValueError(f'{len(args)} operand(s) used for an Implies ' f'(pairs are", "def _simplified_pairs(terms): \"\"\" Reduces a set of minterms, if possible,", "ITE(BooleanFunction): \"\"\" If then else clause. ITE(A, B, C) evaluates", "minterms] dontcares = [list(i) for i in (dontcares or [])]", "like ``Or(x, y)`` or ``x > 1``, use ``true``. Otherwise,", "tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args = [] for i in", ">>> is_literal(a + b) True >>> is_literal(Or(a, b)) False \"\"\"", "from ..sets import Intersection if len(self.free_symbols) == 1: return Intersection(*[arg.as_set()", "if expr.func in (And, Or): if simplified: args = expr.args", "~x & ~y \"\"\" if form == 'cnf' or form", "======== >>> conjuncts(a & b) == frozenset([a, b]) True >>>", "zip(self.args, self.args[1:]): args.append(Or(~a, b)) args.append(Or(~self.args[-1], self.args[0])) return And._to_nnf(*args, simplify=simplify) class", "isinstance(arg, Number) or arg in (True, False): return false if", "@classmethod def eval(cls, *args): return Not(Or(*args)) class Implies(BooleanFunction): \"\"\" Logical", "_convert_to_varsSOP(minterm, variables): \"\"\" Converts a term in the expansion of", "it returns the result of C. 
Examples ======== >>> ITE(True,", "is ``True``, so if there is any doubt over whether", "the Diofant version of True, for use in the logic", "in args: if isinstance(x, Number) or x in [True, False]:", "argset.remove(a) if a in argset else argset.add(a) elif arg in", "if A is True and B is False. Returns True", "use in Python, which is bitwise and. Hence, ``And(a, b)``", "are False. Examples ======== >>> Xor(True, False) true >>> Xor(True,", "b, y:a}. If no such mapping exists, return False. Examples", "= sympify(expr) if is_literal(expr): return True stack = [expr] while", "Python, which is bitwise and. Hence, ``And(a, b)`` and ``a", "rel = [] for x in reversed(list(args)): if isinstance(x, Number)", ">>> ~x ~x >>> Not(And(Or(x, y), Or(~x, ~y))) ~((x |", "!= len(f2[k]): return for i, x in enumerate(f1[k]): matchdict[x] =", "return simplify_logic(expr, 'cnf', True) # Don't convert unless we have", "from its normal use in Python, which is bitwise not.", "return StrictGreaterThan(*arg.args) if isinstance(arg, GreaterThan): return StrictLessThan(*arg.args) def as_set(self): \"\"\"", "if isinstance(x, Number) or x in [True, False]: # Includes", "variable in the terms using QM method. \"\"\" simplified_terms =", "elif b == false and c == true: return Not(a)", "to remember is that ``sympify(True)`` returns ``true``. This means that", "argset: argset.remove(arg) else: argset.add(arg) rel = [(r, r.canonical, (~r).canonical) for", "fi in f} for a in eq.args: if a.is_Symbol: d[a][0]", "\"\"\" from collections import defaultdict from itertools import combinations, product", "and puts them into an integer representation. Examples ======== >>>", "self expr = self.args[0] func, args = expr.func, expr.args if", "over info[2] with respect to info[0].\"\"\" if isinstance(info[0], info[2]): for", "form (for SOP). 
\"\"\" temp = [] for i, m", "Number) or x in [True, False]: # Includes 0, 1", "an And) whose arguments are only symbols or negated symbols.", "similar to how you can generally use 1 instead of", "and it will work in either case. Finally, for boolean", "``b`` are integers. >>> Or(x, y).subs({x: 0}) y \"\"\" zero", "simplest DNF form. Examples ======== >>> to_dnf(b & (a |", "self._to_nnf(*self.args, simplify=simplify) @classmethod def _to_nnf(cls, *args, **kwargs): simplify = kwargs.get('simplify',", "one of the (perhaps many) functions that satisfy the conditions.", "And(a, b) for the mapping {x: a, y:b} or {x:", ">>> Not(And(And(True, x), Or(x, False))) ~x >>> ~x ~x >>>", "form. The return type is an Or or And object", "and ``b`` are integers. >>> Xor(x, y).subs({y: 0}) x \"\"\"", "expression will return ``true`` or ``True``, just use ``==`` instead", "8: Don't compare boolean values to ``True`` or ``False`` using", "the (perhaps many) functions that satisfy the conditions. Examples ========", "x, y) x >>> ITE(False, x, y) y >>> ITE(x,", "len(argset) == 0: return false elif len(argset) == 1: return", "other): return Xor(self, other) __rxor__ = __xor__ def equals(self, other,", "a single negation if isinstance(expr, Not): if not expr.args[0].is_Atom: return", "raise ValueError(f'{d} in minterms is also in dontcares') maxterms =", "class And(LatticeOp, BooleanFunction): \"\"\" Logical AND function. It evaluates its", "not in minterms) and (t not in dontcares): maxterms.append(t) old", "!A v B Accepts two Boolean arguments; A and B.", "value of True. To avoid this issue, use the Diofant", "((x, info[1], info[2]) for x in info[0].args)))) else: return info[0]", "_find_predicates(expr) truthtable = [] for t in product([0, 1], repeat=len(variables)):", "def _new_args_filter(cls, args): newargs = [] rel = [] for", "``a`` and ``b`` are integers. >>> Xor(x, y).subs({y: 0}) x", "expression') class Xor(BooleanFunction): \"\"\" Logical XOR (exclusive OR) function. 
Returns", "======== >>> ITE(True, False, True) false >>> ITE(Or(True, False), And(True,", "or form is None: expr = sympify(expr) if not isinstance(expr,", "= true if isinstance(arg, Xor): for a in arg.args: argset.remove(a)", "In particular, since Python considers ``True`` and ``False`` to be", "binary to it's variable form (for POS). \"\"\" temp =", "~b), {a: a, b: b, c: d, d: x}) \"\"\"", ">>> ITE(False, x, y) y >>> ITE(x, y, y) y", "as Singleton from ..core.sympify import converter, sympify from ..utilities import", "False return True def _rem_redundancy(l1, terms): \"\"\" After the truth", "no branch return POSform(variables, truthtable) else: raise ValueError('form can be", "~true false \"\"\" is_Not = True @classmethod def eval(cls, arg):", "# We want S.true and S.false to work, rather than", "use ``True``. \"``true == True is True``.\" While \"``true is", "rule of thumb is: \"If the boolean in question can", "redundant clauses. Examples ======== >>> to_nnf(Not((~a & ~b) | (c", "hand, ``==`` is not a symbolic operation in Diofant, since", "convert unless we have to if is_dnf(expr): return expr expr", "(b & c) >>> to_dnf((a & b) | (a &", "simplified boolean expressions if possible. By \"simplified\" we mean that", "singleton that can be accessed via ``false``. This is the", "times it appeared as a Symbol, # of times it", "if len(function1.args) != len(function2.args): return if function1.is_Symbol: return {function1: function2}", "in combinations(self.args, i): clause = [~s if s in neg", "\"\"\"Diofant version of True, a singleton that can be accessed", "using ``==``. * Yes: ``if greeting:`` * No: ``if greeting", "from this # file). S.true = true S.false = false", "return _is_form(expr, Or, And) def _is_form(expr, function1, function2): \"\"\"Test whether", "Simplify Relational objects. 
if isinstance(arg, Equality): return Unequality(*arg.args) if isinstance(arg,", "This is the Diofant version of False, for use in", "y), Or(~x, ~y))) ~((x | y) & (~x | ~y))", "sets. Examples ======== >>> true.as_set() UniversalSet() \"\"\" return S.UniversalSet class", "S.BooleanTrue and # S.BooleanFalse, but making the class and instance", "if isinstance(x, Number) or x in (0, 1): newargs.append(True if", "= True def _eval_simplify(self, ratio, measure): return simplify_logic(self) def to_nnf(self,", "one mappings of this sort exist, one of them is", "return info[0] def to_nnf(expr, simplify=True): \"\"\" Converts expr to Negation", "c elif b == c: return b elif b ==", "old = None new = maxterms + dontcares while new", "of the disjuncts in the sentence s. Examples ======== >>>", "while stack: expr = stack.pop() if expr.func in (And, Or):", "advantage of using false instead of False is that shorthand", "True) # Don't convert unless we have to if is_dnf(expr):", "class and instance names the same causes some # major", "return False. Examples ======== >>> function1 = SOPform([x, z, y],", "None @classmethod def _new_args_filter(cls, args): newargs = [] rel =", "elif b == c: return b elif b == true", "a Not(Symbol) in an And or Or, sum of the", "raise ValueError('form can be cnf or dnf only') def _finger(eq):", "is true else it returns the result of C. Examples", "& (b >> a)) False \"\"\" expr = sympify(expr) if", "if simplify: return simplify_logic(expr, 'cnf', True) # Don't convert unless", "form=None, deep=True): \"\"\" This function simplifies a boolean function to", "operator.\"\"\" return Implies(self, other) def __lshift__(self, other): \"\"\"Overloading for <<", ": string ('cnf' or 'dnf') or None (default). If 'cnf'", "simplify=True): \"\"\" Converts expr to Negation Normal Form. A logical", "bit shifts. 
Hence, ``Implies(a, b)`` and ``a >> b`` will", "false: return c elif b == c: return b elif", "particular, since Python considers ``True`` and ``False`` to be integers,", "cnf or dnf only') def _finger(eq): \"\"\" Assign a 5-item", "is bitwise xor. In particular, ``a ^ b`` and ``Xor(a,", "Or(*[~arg for arg in args]), simplify=simplify) if func == Xor:", "is_literal(arg): arg = arg.to_nnf(simplify) if simplify: if isinstance(arg, cls): arg", ">>> to_int_repr([x | y, y], [x, y]) [{1, 2}, {2}]", "a | c \"\"\" expr = sympify(expr) if not isinstance(expr,", "!= function2.__class__: return if len(function1.args) != len(function2.args): return if function1.is_Symbol:", "assumptions system should use ``True`` and ``False``. Aside from not", "for multivariate' ' expressions') class Not(BooleanFunction): \"\"\" Logical Not function", "b), Or(a, c), simplify=simplify) def _eval_derivative(self, x): return self.func(self.args[0], *[a.diff(x)", "True:`` * Worse: ``if greeting is True:`` Examples ======== >>>", "is False. Returns False if the statement is True. 
Examples", "c)) (b & c) | (c & ~a) \"\"\" return", "\"\"\" # do some quick checks if function1.__class__ != function2.__class__:", "LessThan(*arg.args) if isinstance(arg, LessThan): return StrictGreaterThan(*arg.args) if isinstance(arg, GreaterThan): return", "== 'cnf' or form == 'dnf' or form is None:", "return Expr.__new__(cls, *args) def to_nnf(self, simplify=True): a, b = self.args", "& ~y \"\"\" if form == 'cnf' or form ==", "from ..core.singleton import S from ..core.singleton import SingletonWithManagedProperties as Singleton", "particular, ``a ^ b`` and ``Xor(a, b)`` will be different", "*args): try: a, b, c = args except ValueError: raise", "_eval_derivative(self, x): return self.func(self.args[0], *[a.diff(x) for a in self.args[1:]]) #", "And)) def _distribute(info): \"\"\"Distributes info[1] over info[2] with respect to", "for t in product([0, 1], repeat=len(variables)): t = list(t) if", "equivalent to !A v B Accepts two Boolean arguments; A", "(c & ~a) \"\"\" return _distribute((expr, Or, And)) def _distribute(info):", "in remove: argset.remove(a) argset.remove(b) argset.add(True) if len(argset) <= 1: return", ">>> Or(x > 2, x < -2).as_set() [-oo, -2) U", "False immediately if any of them are True, and True", "multivariate' ' expressions') class Not(BooleanFunction): \"\"\" Logical Not function (negation).", "is equivalent to !A v B Accepts two Boolean arguments;", "def to_cnf(expr, simplify=False): \"\"\" Convert a propositional logical sentence s", "# of times it appeared as a Symbol, # of", "k in f1: if k not in f2 or len(f1[k])", "1. Functions in the logic module will return this class", "[] for i, (r, c, nc) in enumerate(rel): for j", "it should return ``True``. 
The assumptions system should use ``True``", "Not(b))) False \"\"\" from ..core.relational import Relational from .inference import", "do some quick checks if function1.__class__ != function2.__class__: return if", "References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables = [sympify(v) for v", "(And, Or): if simplified: args = expr.args for arg in", "are integers. >>> Or(x, y).subs({x: 0}) y \"\"\" zero =", "argument. Return a logical Or function (i.e., the \"sum of", "def to_nnf(self, simplify=True): args = [] for a, b in", "mappings of this sort exist, one of them is returned.", "Equivalent(BooleanFunction): \"\"\" Equivalence relation. Equivalent(A, B) is True iff A", "zero = false identity = true nargs = None @classmethod", "are canonical. Here, ``(t, z)`` could be ``(a, d)`` or", "arg.args else: arg = arg, for a in arg: if", "Implies(x, y) >>> y << x Implies(x, y) Notes =====", "function2 = SOPform([a, b, c], [[1, 0, 1], [1, 0,", "(a | ~b) & (b | ~a) >>> eliminate_implications(Equivalent(a, b,", ">>> (a >> b).equals(~b >> ~a) True >>> Not(And(a, b,", "= [] for i, m in enumerate(maxterm): if m ==", "True if expr is a literal, else False. 
Examples ========", "__int__(self): return int(bool(self)) class BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of True,", "c], [[1, 0, 1], [1, 0, 0]]) >>> bool_map(function1, function2)", "(b | c)) False >>> is_nnf((a | ~a) & (b", "= expr.args for arg in args: if Not(arg) in args:", "@classmethod def eval(cls, *args): return Not(And(*args)) class Nor(BooleanFunction): \"\"\" Logical", "\"\"\"Overloading for << operator.\"\"\" return Implies(other, self) __rrshift__ = __lshift__", "= [] for r in argset: if isinstance(r, Relational): rel.append((r,", "to_dnf(b & (a | c)) (a & b) | (b", "in self.args]) else: raise NotImplementedError('Sorry, Or.as_set has not yet been'", "And or Or, # of times it appeared as a", "of variables that makes the two expressions bool1 and bool2", "False remove = [] for i, (r, c, nc) in", "break return essential def SOPform(variables, minterms, dontcares=None): \"\"\" The SOPform", "expects exactly 3 arguments') if a == true: return b", "in info[0].args)))) else: return info[0] def to_nnf(expr, simplify=True): \"\"\" Converts", "else returns -1. \"\"\" index = -1 for x, (i,", "any of them are False, and True if they are", "(r, c, nc) in enumerate(rel): for j in range(i +", "disjuncts(a & b) == frozenset([And(a, b)]) True \"\"\" return Or.make_args(expr)", "...) & ...). If simplify is True, the expr is", "an Or or And object in Diofant. Parameters ========== expr", "ITE(False, x, y) y >>> ITE(x, y, y) y \"\"\"", "is None: # pragma: no branch return POSform(variables, truthtable) else:", "``(d, a)``: >>> eq1 = Or(And(Not(y), t), And(Not(y), z), And(x,", "== c: remove.append((r, rj)) break for a, b in remove:", "Furthermore, since bools in Python subclass from ``int``, ``~True`` is", "y).subs({y: 0}) x \"\"\" def __new__(cls, *args, **kwargs): argset =", "~ as logical operators. 
Examples ======== >>> eliminate_implications(Implies(a, b)) b", "1: temp.append(variables[i]) return And(*temp) def _convert_to_varsPOS(maxterm, variables): \"\"\" Converts a", "always returns ``True`` or ``False``, and does so in terms", "..core import (Equality, GreaterThan, LessThan, StrictGreaterThan, StrictLessThan, Unequality) if isinstance(arg,", "= expr.func, expr.args if func == And: return Or._to_nnf(*[~arg for", "* Yes: ``if greeting:`` * No: ``if greeting == True:``", ">>> is_nnf(a & b | ~c) True >>> is_nnf((a |", "odd: argset.remove(true) if true in argset else argset.add(true) for a,", "a = simplify_logic(bool1) b = simplify_logic(bool2) m = match(a, b)", "And) def _is_form(expr, function1, function2): \"\"\"Test whether or not an", "That is, return an expression that is equivalent to s,", "in the logic module. The primary advantage of using false", "A == false or B == true or B ==", "if m == 1: temp.append(Not(variables[i])) elif m == 0: temp.append(variables[i])", "other))) class BooleanAtom(Atom, Boolean): \"\"\"Base class of BooleanTrue and BooleanFalse.\"\"\"", "not lit.is_Atom: return False return True def eliminate_implications(expr): \"\"\" Change", "is the Diofant version of False, for use in the", "the fingerprint dictionaries f1 = _finger(function1) f2 = _finger(function2) #", "def as_set(self): \"\"\" Rewrite logic operators and relationals in terms", "uses a three-valued logic (``True``, ``False``, ``None``), whereas ``true`` and", "(exclusive OR) function. 
Returns True if an odd number of", ": string or boolean expression form : string ('cnf' or", "[1, 0, 0]]) >>> bool_map(function1, function2) (y & ~z, {y:", "~b) | (c & d))) (a | b) & (~c", "False \"\"\" return _is_form(expr, Or, And) def _is_form(expr, function1, function2):", "..sets import EmptySet return EmptySet() true = BooleanTrue() false: BooleanFalse", "> 2, x < -2).as_set() [-oo, -2) U (2, oo]", ">>> minterms = [[0, 0, 0, 1], [0, 0, 1,", "return [false] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, And) def as_set(self): \"\"\"", "a 5-item fingerprint to each symbol in the equation: [", "B.canonical: return true elif (~A).canonical == B.canonical: return B else:", "x not in (3, minterm[i]): return False return True def", "x in info[0].args)))) else: return info[0] def to_nnf(expr, simplify=True): \"\"\"", "its normal use in Python, which is bitwise or. Hence,", "true >>> Equivalent(True, False, False) false >>> Equivalent(x, And(x, True))", "0, 0, 1], [0, 0, 1, 1], [0, 1, 1,", "are provided as a convenience, but note that their use", "< -2).as_set() [-oo, -2) U (2, oo] \"\"\" from ..sets", "True >>> Not(And(a, b, c)).equals(And(Not(a), Not(b), Not(c))) False >>> Not(And(a,", "integers. >>> Xor(x, y).subs({y: 0}) x \"\"\" def __new__(cls, *args,", "any non-boolean functions contained within the input. 
Examples ======== >>>", "\"\"\" temp = [] for i, m in enumerate(minterm): if", "& ~z) | (~x & ~y & z) >>> simplify_logic(b)", "is_Boolean = True @property def canonical(self): return self def __int__(self):", "to ``true`` when necessary, similar to how you can generally", "in minterms) and (t not in dontcares): maxterms.append(t) old =", "can be replaced by an arbitrary symbolic ``Boolean``, like ``Or(x,", "return self._to_nnf(*self.args, simplify=simplify) @classmethod def _to_nnf(cls, *args, **kwargs): simplify =", "nc) in enumerate(rel): for j in range(i + 1, len(rel)):", "False) false >>> Equivalent(x, And(x, True)) true \"\"\" def __new__(cls,", "matchdict else None a = simplify_logic(bool1) b = simplify_logic(bool2) m", "from ..core import Atom, cacheit from ..core.expr import Expr from", "arg = arg, for a in arg: if Not(a) in", "Number) or arg in (True, False): return false if arg", "(c | ~b) \"\"\" return to_nnf(expr) def is_literal(expr): \"\"\" Returns", "False, True, True, False) true >>> Xor(True, False, True, False)", "== true: return b elif a == false: return c", "``True``, so if there is any doubt over whether a", ">>> true >> false false \"\"\" @classmethod def eval(cls, *args):", "self.args return And._to_nnf(Or(~a, b), Or(a, c), simplify=simplify) def _eval_derivative(self, x):", "That is, of the form ((A | ~B | ...)", "False)) true >>> Not(Or(True, False)) false >>> Not(And(And(True, x), Or(x,", "return And._to_nnf(*args, simplify=simplify) class Nand(BooleanFunction): \"\"\" Logical NAND function. It", "that can be accessed via ``true``. This is the Diofant", "names the same causes some # major issues (like the", "= dict(zip(symbols, range(1, len(symbols) + 1))) def append_symbol(arg, symbols): if", "If then else clause. ITE(A, B, C) evaluates and returns", "as logical operators. 
Examples ======== >>> eliminate_implications(Implies(a, b)) b |", "== B: return true elif A.is_Relational and B.is_Relational: if A.canonical", "======== >>> is_literal(a) True >>> is_literal(~a) True >>> is_literal(a +", "elif not is_literal(expr): return False return True def is_cnf(expr): \"\"\"", "[] for r in argset: if isinstance(r, Relational): rel.append((r, r.canonical,", "1, 1], ... [0, 1, 1, 1], [1, 0, 1,", "is liable to be some confusion as to when ``True``", "the arguments are False. Returns False if all arguments are", "``a`` and ``b`` are integers. >>> And(x, y).subs({x: 1}) y", "b do not. \"\"\" f = eq.free_symbols d = {fi:", "x, y, z], minterms, dontcares) z & (y | ~t)", "is True``\" is ``False``, \"``true == True``\" is ``True``, so", "..core.relational import Relational from .inference import satisfiable other = sympify(other)", "[terms[i] for i in [_ for _ in todo if", "True. Returns True if any of the arguments are False.", "\"\"\" Logical implication. A implies B is equivalent to !A", "info[2]): for arg in info[0].args: if isinstance(arg, info[1]): conj =", "but making the class and instance names the same causes", "\"\"\" return _is_form(expr, Or, And) def _is_form(expr, function1, function2): \"\"\"Test", "function that lives in a boolean space. 
This is used", "continue if isinstance(cls, Not): if not cls.args[0].is_Atom: return False elif", "= super().__new__(cls, *args, **kwargs) for arg in super(Xor, obj).args: if", "else: arg = arg, for a in arg: if Not(a)", "Equivalent(True, False, False) false >>> Equivalent(x, And(x, True)) true \"\"\"", "arg): from ..core import (Equality, GreaterThan, LessThan, StrictGreaterThan, StrictLessThan, Unequality)", "False is that shorthand boolean operations like ~ and >>", "strict=True) for arg in args] argset = set(args) for x", "function2): return False for lit in cls.args: if isinstance(lit, Not):", "match(a, b) if m: return a, m return m is", "that a function has been denested and is either an", "``Implies(a, b)`` and ``a >> b`` will return different things", "self.args[0])) return And._to_nnf(*args, simplify=simplify) class ITE(BooleanFunction): \"\"\" If then else", "x}) \"\"\" def match(function1, function2): \"\"\"Return the mapping that equates", "def args(self): return tuple(ordered(self._argset)) def to_nnf(self, simplify=True): args = []", "eq1 = Or(And(Not(y), t), And(Not(y), z), And(x, y)) >>> eq2", "old = None new = minterms + dontcares while new", "a simplified set of minterms with one less variable in", "b) == frozenset([Or(a, b)]) True \"\"\" return And.make_args(expr) def disjuncts(expr):", "\"\"\" The SOPform function uses simplified_pairs and a redundant group-", "``==``. * Yes: ``if greeting:`` * No: ``if greeting ==", "c), False) True >>> is_nnf(Not(a & b) | c) False", "number of complimentary pairs odd? start 0 -> False remove", "Boolean algebra module for Diofant. \"\"\" from collections import defaultdict", "expr.args if func == And: return Or._to_nnf(*[~arg for arg in", "import Relational from .inference import satisfiable other = sympify(other) if", "& d & (a | b) & (~a | ~b),", "form : string ('cnf' or 'dnf') or None (default). 
If", "generate '1' (the minterms) into the smallest Sum of Products", "c), simplify=simplify) def _eval_derivative(self, x): return self.func(self.args[0], *[a.diff(x) for a", "~y & z) >>> simplify_logic(b) ~x & ~y >>> sympify(b)", "Python, which is bitwise xor. In particular, ``a ^ b``", "a boolean function to its simplified version in SOP or", "0, 1], [0, 0, 1]]) >>> function2 = SOPform([a, b,", "return And._to_nnf(*[~arg for arg in args], simplify=simplify) if func ==", "``a >> b`` will return different things if ``a`` and", "if isinstance(arg, Equality): return Unequality(*arg.args) if isinstance(arg, Unequality): return Equality(*arg.args)", "if they are all False. Returns False if any argument", "returns index, else returns -1. \"\"\" index = -1 for", "y:a}. If no such mapping exists, return False. Examples ========", "to import the class directly from this # file). S.true", "(b >> a)) False \"\"\" expr = sympify(expr) if is_literal(expr):", "c)) (a | ~c) & (b | ~a) & (c", "Diofant. An important thing to remember is that ``sympify(True)`` returns", ".inference import satisfiable other = sympify(other) if self.has(Relational) or other.has(Relational):", "(negation). Returns True if the statement is False. Returns False", "| c) True >>> is_dnf(a & b & c) True", "======== >>> is_dnf(a | b | c) True >>> is_dnf(a", "in order, giving False immediately if any of them are", "& ...) | (B & C & ...) | ...).", "``true`` and ``false``. >>> True >> False 1 >>> true", ">>> is_dnf(a & (b | c)) False \"\"\" return _is_form(expr,", "terms of real sets. Examples ======== >>> false.as_set() EmptySet() \"\"\"", "Or(self, other) __ror__ = __or__ def __invert__(self): \"\"\"Overloading for ~", "len(args)+1, 2): for neg in combinations(args, i): clause = [~s", "S.true and S.false to work, rather than S.BooleanTrue and #", "a elif b == false and c == true: return", "too. 
The result will be one of the (perhaps many)", "if isinstance(arg, Number) or arg in (True, False): if not", "issue, use the Diofant boolean types ``true`` and ``false``. >>>", "dontcares: if d in minterms: raise ValueError(f'{d} in minterms is", "return Or.make_args(expr) def distribute_and_over_or(expr): \"\"\" Given a sentence s consisting", "False >>> is_nnf((a >> b) & (b >> a)) False", "\"\"\" return to_nnf(expr) def is_literal(expr): \"\"\" Returns True if expr", "import combinations, product from ..core import Atom, cacheit from ..core.expr", "PEP 8: Don't compare boolean values to ``True`` or ``False``", "them are False, and False if they are all True.", "And function (i.e., the \"product of sums\" or \"POS\" form)", "*args) def to_nnf(self, simplify=True): a, b = self.args return Or._to_nnf(~a,", "evaluate to false. Notes ===== See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples", "simplified_terms = [] todo = list(range(len(terms))) for i, ti in", "o = len(a.args) + sum(isinstance(ai, Not) for ai in a.args)", "= __or__ def __invert__(self): \"\"\"Overloading for ~ operator.\"\"\" return Not(self)", "are True, and True if they are all False. Returns", "smallest Product of Sums form. The variables must be given", "be used and when ``true`` should be used in various", "true or A == false or B == true or", "True``\" is ``True``, so if there is any doubt over", "they must have the same literals. Examples ======== >>> (a", ">>> is_nnf(Not(a & b) | c) False >>> is_nnf((a >>", "return Not(self) def __rshift__(self, other): \"\"\"Overloading for >> operator.\"\"\" return", "== 0: return false elif len(argset) == 1: return argset.pop()", "ITE(True, False, True) false >>> ITE(Or(True, False), And(True, True), Xor(True,", "but a and b do not. 
\"\"\" f = eq.free_symbols", "use the prime implicant table method to recognize and eliminate", "arg.to_nnf(simplify) if simplify: if isinstance(arg, cls): arg = arg.args else:", "that is not a BooleanFunction itself. \"\"\" if not isinstance(expr,", "is True, the result contains no redundant clauses. Examples ========", "minterm2): \"\"\" Checks if a pair of minterms differs by", "equates variables between two simplified boolean expressions if possible. By", "b | c) True >>> is_dnf(a & b & c)", "\"\"\" This function simplifies a boolean function to its simplified", "Examples ======== >>> minterms = [[0, 0, 0, 1], [0,", "\"\"\" return _is_form(expr, And, Or) def is_dnf(expr): \"\"\" Test whether", "1: temp.append(Not(variables[i])) elif m == 0: temp.append(variables[i]) return Or(*temp) def", "args], simplify=simplify) if func == Implies: a, b = args", "y: c, z: d}) >>> eq = And(Xor(a, b), c,", "_simplified_pairs(terms): \"\"\" Reduces a set of minterms, if possible, to", "_finger(eq): \"\"\" Assign a 5-item fingerprint to each symbol in", "Symbol, # of times it appeared as a Not(symbol), #", "And(*temp) def _convert_to_varsPOS(maxterm, variables): \"\"\" Converts a term in the", "form with fewest args (in CNF by default). deep :", "for some correspondence between the variables of each. If more", "Nor(BooleanFunction): \"\"\" Logical NOR function. It evaluates its arguments in", "if any of them are False, and True if they", "def __hash__(self): return hash(False) def as_set(self): \"\"\" Rewrite logic operators", "And(*argset) if False in argset: argset.discard(False) return And(*[~arg for arg", "canonical. Here, ``(t, z)`` could be ``(a, d)`` or ``(d,", "Unequality) if isinstance(arg, Number) or arg in (True, False): return", "===== See note in :py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples ======== >>> sympify(False) false", "d in minterms: raise ValueError(f'{d} in minterms is also in", "returned. 
For example, And(x, y) is logically equivalent to And(a,", "& C & ...) | ...). If simplify is True,", "only &, |, and ~ as logical operators. Examples ========", "satisfy the conditions. Examples ======== >>> minterms = [[0, 0,", "===== The ``>>`` and ``<<`` operators are provided as a", "mapping exists, return False. Examples ======== >>> function1 = SOPform([x,", "+ sum(isinstance(ai, Not) for ai in a.args) for ai in", "this class, whereas with True they act bitwise on 1.", "in (True, False): if not arg: continue else: arg =", "measure): return simplify_logic(self) def to_nnf(self, simplify=True): return self._to_nnf(*self.args, simplify=simplify) @classmethod", "3 if newterm not in simplified_terms: simplified_terms.append(newterm) simplified_terms.extend( [terms[i] for", "\"``true is True``\" is ``False``, \"``true == True``\" is ``True``,", "To avoid this issue, use the Diofant boolean types ``true``", "unless we have to if is_cnf(expr): return expr expr =", "result of C. Examples ======== >>> ITE(True, False, True) false", ">> b).equals(~b >> ~a) True >>> Not(And(a, b, c)).equals(And(Not(a), Not(b),", "1], [0, 0, 1, 1], ... [0, 1, 1, 1],", "not robust enough (see issue sympy/sympy#4835) so this is a", "True. Examples ======== >>> x & y x & y", "= sympify(other) if self.has(Relational) or other.has(Relational): raise NotImplementedError('handling of relationals')", "Implies(False, False) true >>> Implies(True, True) true >>> Implies(False, True)", "and False if they are all True. Returns True if", "True if they are all False. Returns False if any", "return And._to_nnf(*args, simplify=simplify) class ITE(BooleanFunction): \"\"\" If then else clause.", "BooleanFunction) def to_int_repr(clauses, symbols): \"\"\" Takes clauses in CNF format", "either case. Finally, for boolean flags, it's better to just", "\"\"\" expr = sympify(expr) if is_nnf(expr, simplify): return expr return", "bitwise on 0. 
Functions in the logic module will return", "is_Boolean = True def _eval_simplify(self, ratio, measure): return simplify_logic(self) def", "= args return And._to_nnf(Or(a, ~c), Or(~a, ~b), simplify=simplify) raise ValueError(f'Illegal", "i, x in enumerate(f1[k]): matchdict[x] = f2[k][i] return matchdict if", "expected on this class, whereas with True they act bitwise", "return self.atoms() == other.atoms() and \\ not satisfiable(Not(Equivalent(self, other))) class", "return Not(a) def to_nnf(self, simplify=True): a, b, c = self.args", "function. Returns True if an odd number of the arguments", "1 and Not(Symbol) as 2 ] >>> eq = Or(And(Not(y),", "of Sums form. The variables must be given as the", "else: raise NotImplementedError('Sorry, Or.as_set has not yet been' ' implemented", "clauses. Examples ======== >>> is_nnf(a & b | ~c) True", "c)) >>> bool_map(eq1, eq2) ((x & y) | (t &", "..core.function import Application from ..core.numbers import Number from ..core.operations import", ">>> is_literal(a) True >>> is_literal(~a) True >>> is_literal(a + b)", "(for POS). \"\"\" temp = [] for i, m in", "appeared as a Symbol, # of times it appeared as", "x, (i, j) in enumerate(zip(minterm1, minterm2)): if i != j:", "False. Returns False if all arguments are True. 
Examples ========", "!= j: if index == -1: index = x else:", "raise NotImplementedError('Sorry, Not.as_set has not yet been' ' implemented for", "True \"\"\" return Or.make_args(expr) def distribute_and_over_or(expr): \"\"\" Given a sentence", "must necessarily be ``true`` instead of ``True``, as elements of", "\"\"\" index = -1 for x, (i, j) in enumerate(zip(minterm1,", "it appeared as a Symbol in an And or Or,", "Not(And(Or(x, y), Or(~x, ~y))) ~((x | y) & (~x |", "..core.operations import LatticeOp from ..core.singleton import S from ..core.singleton import", "is that shorthand boolean operations like ~ and >> will", "newterm = ti[:] newterm[index] = 3 if newterm not in", "from ..core.numbers import Number from ..core.operations import LatticeOp from ..core.singleton", "= None newterm = ti[:] newterm[index] = 3 if newterm", "return LessThan(*arg.args) if isinstance(arg, LessThan): return StrictGreaterThan(*arg.args) if isinstance(arg, GreaterThan):", "* 5 for fi in f} for a in eq.args:", "temp.append(variables[i]) return Or(*temp) def _simplified_pairs(terms): \"\"\" Reduces a set of", "are False. Returns False if an even number of the", "cls.args[0].is_Atom: return False elif not isinstance(cls, function2): return False for", "def is_nnf(expr, simplified=True): \"\"\" Checks if expr is in Negation", "when necessary, similar to how you can generally use 1", "SingletonWithManagedProperties as Singleton from ..core.sympify import converter, sympify from ..utilities", "False. Returns False if an even number of the arguments", "true and c == false: return a elif b ==", "True) a | c \"\"\" expr = sympify(expr) if not", "y, y) y \"\"\" @classmethod def eval(cls, *args): try: a,", "integer representation. Examples ======== >>> to_int_repr([x | y, y], [x,", "is different from their normal use in Python, which is", "which is ``-2``, which has a boolean value of True.", "of ``True``, as elements of ``.args`` must be ``Basic``. 
On", "class Nand(BooleanFunction): \"\"\" Logical NAND function. It evaluates its arguments", ">>> Nand(False, True) true >>> Nand(True, True) false >>> Nand(x,", "Union if len(self.free_symbols) == 1: return Union(*[arg.as_set() for arg in", "false >>> Nor(True, True) false >>> Nor(False, True) false >>>", "s in neg else s for s in args] result.append(Or(*clause))", ">>> simplify_logic(_) ~x & ~y \"\"\" if form == 'cnf'", "instead of ``is`` to do the comparison, and it will", "arguments in order, giving True immediately if any of them", "dictionaries f1 = _finger(function1) f2 = _finger(function2) # more quick", "Or._to_nnf(~a, b, simplify=simplify) class Equivalent(BooleanFunction): \"\"\" Equivalence relation. Equivalent(A, B)", "And(x, y)) >>> eq2 = Or(And(Not(c), a), And(Not(c), d), And(b,", "will be the same as ``1 >> 1``, i.e., 0,", "case of a single expression of function2 if isinstance(expr, function2):", "nc = (~c).canonical if any(r == nc for r in", "======== >>> to_nnf(Not((~a & ~b) | (c & d))) (a", "b, c)).equals(And(Not(a), Not(b), Not(c))) False >>> Not(And(a, Not(a))).equals(Or(b, Not(b))) False", "dontcares): maxterms.append(t) old = None new = maxterms + dontcares", "'cnf' or 'dnf', the simplest expression in the corresponding normal", "from collections import defaultdict from itertools import combinations, product from", "__new__(cls, *args, **options): from ..core.relational import Relational args = [sympify(arg,", ">>> x >> y Implies(x, y) >>> y << x", "return Or._to_nnf(*[~arg for arg in args], simplify=simplify) if func ==", "& ~b)) & (b | ~a | (b & ~a))", "instead of ``True``, as elements of ``.args`` must be ``Basic``.", "class, whereas with False they act bitwise on 0. 
Functions", "is provided as a convenience, but note that its use", "bool_map(bool1, bool2): \"\"\" Return the simplified version of bool1, and", "is ``False``, \"``true == True``\" is ``True``, so if there", "So y and x have unique fingerprints, but a and", "b)]) True \"\"\" return Or.make_args(expr) def distribute_and_over_or(expr): \"\"\" Given a", "are integers. >>> And(x, y).subs({x: 1}) y \"\"\" zero =", "A.canonical == B.canonical: return true elif (~A).canonical == B.canonical: return", "return true if True in argset: argset.discard(True) return And(*argset) if", "| (c & ~a) \"\"\" return _distribute((expr, Or, And)) def", "class BooleanTrue(BooleanAtom, metaclass=Singleton): \"\"\"Diofant version of True, a singleton that", "false: BooleanFalse = BooleanFalse() # We want S.true and S.false", "the ``.args`` of any expression, then it must necessarily be", "if expr is a literal, else False. Examples ======== >>>", "deep: from ..simplify import simplify variables = [simplify(v) for v", "arguments') if a == true: return b elif a ==", "from ``int``, ``~True`` is the same as ``~1`` which is", "or. Hence, ``Or(a, b)`` and ``a | b`` will return", "class BooleanFunction(Application, Boolean): \"\"\"Boolean function is a function that lives", "use ``True``\". In other words, use ``true`` only on those", "a three-valued logic (``True``, ``False``, ``None``), whereas ``true`` and ``false``", "the same causes some # major issues (like the inability", "or form is None: # pragma: no branch return POSform(variables,", "# major issues (like the inability to import the class", "True, and True if they are all False. Returns False", "(True, False): return false if arg else true if arg.is_Not:", "b >> a)) (a | ~b | (a & ~b))", "is NOT simplified. Examples ======== >>> distribute_or_over_and(And(Or(Not(a), b), c)) (b", "or form == 'dnf' or form is None: expr =", "1, 1, 1], ... 
[1, 0, 1, 1], [1, 1,", "an And (or an Or) whose arguments are either symbols", "a function from binary to it's variable form (for SOP).", "1, 2, 8): [y]} So y and x have unique", "(-oo, 0] \"\"\" if len(self.free_symbols) == 1: return self.args[0].as_set().complement(S.Reals) else:", "b]) True >>> disjuncts(a & b) == frozenset([And(a, b)]) True", "branch return POSform(variables, truthtable) else: raise ValueError('form can be cnf", "statement is True. Examples ======== >>> Not(True) false >>> Not(False)", "return And._to_nnf(*result, simplify=simplify) if func == ITE: a, b, c", "in the equation: [ # of times it appeared as", "isinstance(expr, Not): return not isinstance(expr.args[0], BooleanFunction) else: return not isinstance(expr,", "all input combos that generate '1' (the minterms) into the", "minterms with one less variable in the terms using QM", "in cls.args: if isinstance(lit, Not): if not lit.args[0].is_Atom: return False", "This means that for the most part, you can just", "an expression is of the required form.\"\"\" expr = sympify(expr)", "which has a truth value of False. To avoid this", "boolean function to its simplified version in SOP or POS", "to find logical predicates in BooleanFunctions. A logical predicate is", "True stack = [expr] while stack: expr = stack.pop() if", "(a | b) & (~c | ~d) >>> to_nnf(Equivalent(a >>", "(a & b) | (b & c) >>> to_dnf((a &", "raise ValueError(f'{d} in minterms is also in dontcares') old =", "false or B == true or B == false: return", "\"\"\"Diofant version of False, a singleton that can be accessed", "args: if isinstance(x, Number) or x in [True, False]: #", "not expr.args[0].is_Atom: return False if not isinstance(expr, function1): return False", "Here, ``(t, z)`` could be ``(a, d)`` or ``(d, a)``:", "essential: essential.append(temporary[0]) for x in terms: for y in essential:", "argument is True. Returns True if all arguments are False.", "one of them is returned. 
For example, And(x, y) is", "return info[1](*list(map(_distribute, ((info[2](c, rest), info[1], info[2]) for c in conj.args))))", "the comparison, and it will work in either case. Finally,", "redundant pairs, and return the essential arguments. \"\"\" essential =", "Unequality(*arg.args) if isinstance(arg, Unequality): return Equality(*arg.args) if isinstance(arg, StrictLessThan): return", "implemented for multivariate' ' expressions') class Not(BooleanFunction): \"\"\" Logical Not", "1, 1], [1, 0, 1, 1], [1, 1, 1, 1]]", "not an expression is of the required form.\"\"\" expr =", "false >>> false >> false true >>> False >> False", "giving True immediately if any of them are False, and", "b = simplify_logic(bool2) m = match(a, b) if m: return", "return if function1.is_Symbol: return {function1: function2} # get the fingerprint", "~b)) & (b | ~a | (b & ~a)) \"\"\"", "B is equivalent to !A v B Accepts two Boolean", "= set(args) for x in args: if isinstance(x, Number) or", "BooleanFalse = BooleanFalse() # We want S.true and S.false to", "Not is applied only to literals. If simplify is True,", "logical predicates in BooleanFunctions. A logical predicate is defined here", "Examples ======== >>> x & y x & y Notes", "s to disjunctive normal form. That is, of the form", "of the conjuncts in the expr s. Examples ======== >>>", "BooleanFunction that is not a BooleanFunction itself. \"\"\" if not", "are all True. Returns True if any of the arguments", "all input combinations that generate '1' (the minterms) into the", "format and puts them into an integer representation. Examples ========", "symbols or negated symbols. 
For example, And(x, Not(y), Or(w, Not(z))).", "======== >>> Not(True) false >>> Not(False) true >>> Not(And(True, False))", "ValueError('ITE expects exactly 3 arguments') if a == true: return", "isinstance(arg, GreaterThan): return StrictLessThan(*arg.args) def as_set(self): \"\"\" Rewrite logic operators", "+ 1):]): index = _check_pair(ti, tj) if index != -1:", "~y), {t: a, x: b, y: c, z: d}) >>>", "of them are True, and False if they are all", "in CNF format and puts them into an integer representation.", "frozenset(argset) obj = super().__new__(cls, _args) obj._argset = _args return obj", "defaultdict(list) for k, v in ordered(d.items()): inv[tuple(v)].append(k) return inv def", "CNF form. Examples ======== >>> to_cnf(~(a | b) | c)", "y x & y Notes ===== The ``&`` operator is", "# Special case of an Atom if expr.is_Atom: return True", "example, And(x, Not(y), Or(w, Not(z))). Basic.match is not robust enough", "to_nnf(expr, simplify=True): \"\"\" Converts expr to Negation Normal Form. A", "(~r).canonical)) remove = [] for i, (r, c, nc) in", "base class for And, Or, Not, etc. \"\"\" is_Boolean =", "2) \"\"\" from ..sets import Intersection if len(self.free_symbols) == 1:", "exactly 3 arguments') if a == true: return b elif", "logical sentence s to conjunctive normal form. That is, of", "False) false >>> Implies(False, False) true >>> Implies(True, True) true", "return true elif A.is_Relational and B.is_Relational: if A.canonical == B.canonical:", "Not): return not isinstance(expr.args[0], BooleanFunction) else: return not isinstance(expr, BooleanFunction)", "class Or(LatticeOp, BooleanFunction): \"\"\" Logical OR function It evaluates its", "both False. Returns True if all of the arguments are", "normal form. 
That is, of the form ((A & ~B", "y) ~(x & y) \"\"\" @classmethod def eval(cls, *args): return", "conjuncts(a & b) == frozenset([a, b]) True >>> conjuncts(a |", "if deep: from ..simplify import simplify variables = [simplify(v) for", "expr if simplify: return simplify_logic(expr, 'cnf', True) # Don't convert", "a function from binary to it's variable form (for POS).", "ai in a.args: if ai.is_Symbol: d[ai][2] += 1 d[ai][-1] +=", "def __hash__(self): return hash(True) def as_set(self): \"\"\" Rewrite logic operators", "Converts a term in the expansion of a function from", "| (z & ~y), {t: a, x: b, y: c,", "in reversed(list(args)): if isinstance(x, Number) or x in (0, 1):", "y >>> ITE(x, y, y) y \"\"\" @classmethod def eval(cls,", "their normal use in Python, which is bit shifts. Hence,", "simplify_logic(_) ~x & ~y \"\"\" if form == 'cnf' or", "ITE(x, y, y) y \"\"\" @classmethod def eval(cls, *args): try:", "======== >>> is_nnf(a & b | ~c) True >>> is_nnf((a", "_rem_redundancy(l1, terms): \"\"\" After the truth table has been sufficiently", "def is_literal(expr): \"\"\" Returns True if expr is a literal,", "pragma: no branch if _compare_term(x, z): assert z not in", "(see issue sympy/sympy#4835) so this is a workaround that is", "new != old: old = new new = _simplified_pairs(old) essential", "== frozenset([And(a, b)]) True \"\"\" return Or.make_args(expr) def distribute_and_over_or(expr): \"\"\"", "class BooleanAtom(Atom, Boolean): \"\"\"Base class of BooleanTrue and BooleanFalse.\"\"\" is_Boolean", "b)`` and ``a & b`` will return different things if", "[0, 1, 0, 1]] >>> SOPform([t, x, y, z], minterms,", "also in dontcares') old = None new = minterms +", "False. Returns False if the statement is True. Examples ========", "flags, it's better to just use ``if x`` instead of", "have the same literals. 
Examples ======== >>> (a >> b).equals(~b", "isinstance(x, Number) or x in (0, 1): newargs.append(True if x", "c) True >>> is_dnf(a & (b | c)) False \"\"\"", "(z & ~y), {t: a, x: b, y: c, z:", "obj = super().__new__(cls, _args) obj._argset = _args return obj @property", "dontcares = [[0, 0, 0, 0], [0, 0, 1, 0],", "======== BooleanFalse \"\"\" def __bool__(self): return True def __hash__(self): return", "return b elif a == false: return c elif b", "them into an integer representation. Examples ======== >>> to_int_repr([x |", "Nor(False, True) false >>> Nor(False, False) true >>> Nor(x, y)", "remove.append((r, rj)) break for a, b in remove: argset.remove(a) argset.remove(b)", "1, 0, 2): [x], (0, 0, 1, 0, 3): [a,", "B else: return Expr.__new__(cls, *args) def to_nnf(self, simplify=True): a, b", "minterms, dontcares) (y & z) | (z & ~t) References", "= None @classmethod def _new_args_filter(cls, args): newargs = [] rel", "..core.relational import Relational args = [sympify(arg, strict=True) for arg in", "= (~c).canonical if any(r == nc for r in rel):", "True >>> is_dnf(a & b & c) True >>> is_dnf((a", "[] for x in terms: temporary = [] for y", "[0, 0, 1, 0], [0, 1, 0, 1]] >>> SOPform([t,", "or not an expression is of the required form.\"\"\" expr", "rj, cj = rel[j][:2] if cj == nc: odd =", ">> False 1 >>> true >> false false \"\"\" @classmethod", "Or._to_nnf(*[~arg for arg in args], simplify=simplify) if func == Or:", ">>> sympify(False) false >>> false >> false true >>> False", "\"\"\" Logical OR function It evaluates its arguments in order,", "ignored, pass them as a list, too. The result will", "for arg in args]), simplify=simplify) if func == Xor: result", "{expr} return set().union(*(_find_predicates(i) for i in expr.args)) def simplify_logic(expr, form=None,", "__or__ def __invert__(self): \"\"\"Overloading for ~ operator.\"\"\" return Not(self) def", "major issues (like the inability to import the class directly", "| ...) & (B | C | ...) 
& ...).", "isinstance(cls, Not): if not cls.args[0].is_Atom: return False elif not isinstance(cls,", "function uses simplified_pairs and a redundant group- eliminating algorithm to", "gives the desired outcome. If there are inputs that can", "distribute_or_over_and(expr): \"\"\" Given a sentence s consisting of conjunctions and", "is not robust enough (see issue sympy/sympy#4835) so this is", "_to_nnf(cls, *args, **kwargs): simplify = kwargs.get('simplify', True) argset = set()", "= set() for arg in args: if not is_literal(arg): arg", "1, 0, 1]] >>> SOPform([t, x, y, z], minterms, dontcares)", "| (z & ~t) References ========== * https://en.wikipedia.org/wiki/Quine-McCluskey_algorithm \"\"\" variables", "conj]) return info[1](*list(map(_distribute, ((info[2](c, rest), info[1], info[2]) for c in", "essential.append(z) break return essential def SOPform(variables, minterms, dontcares=None): \"\"\" The", "form == 'cnf' or form == 'dnf' or form is", "and B. Returns False if A is True and B", "Or(And(Not(y), t), And(Not(y), z), And(x, y)) >>> eq2 = Or(And(Not(c),", "\"\"\" def __new__(cls, *args, **kwargs): argset = set() obj =", "from ..sets import Union if len(self.free_symbols) == 1: return Union(*[arg.as_set()", "of times it appeared as a Symbol, # of times", "that its use here is different from its normal use", "possible. By \"simplified\" we mean that a function has been", "..core.expr import Expr from ..core.function import Application from ..core.numbers import", "product([0, 1], repeat=len(variables)): t = list(t) if (t not in", "even number of the arguments are True and the rest", "= true S.false = false converter[bool] = lambda x: true", "number of arguments with which it appeared, counting Symbol as", "same literals. Examples ======== >>> (a >> b).equals(~b >> ~a)", "the simplified version of bool1, and the mapping of variables", "sets. 
Examples ======== >>> false.as_set() EmptySet() \"\"\" from ..sets import", "arg: if Not(a) in argset: return cls.zero argset.add(a) else: argset.add(arg)", "is in conjunctive normal form. Examples ======== >>> is_cnf(a |", "x: true if x else false class BooleanFunction(Application, Boolean): \"\"\"Boolean", "expr = sympify(expr) if not isinstance(expr, BooleanFunction): return expr variables", "Or(~a, ~b), simplify=simplify) raise ValueError(f'Illegal operator {func} in expression') class", "and a redundant group- eliminating algorithm to convert the list", "Examples ======== >>> Not(True) false >>> Not(False) true >>> Not(And(True,", "\"\"\" essential = [] for x in terms: temporary =", "true >>> Xor(True, True) false >>> Xor(True, False, True, True,", "different things if ``a`` and ``b`` are integers. >>> Or(x,", "1): newargs.append(True if x else False) continue if x.is_Relational: c", "logical And function (i.e., the \"product of sums\" or \"POS\"", "only symbols or negated symbols. For example, And(x, Not(y), Or(w,", "f'(pairs are required): {args!s}') if A == true or A", "| (a & ~b)) & (b | ~a | (b", "else: d[ai.args[0]][3] += 1 d[ai.args[0]][-1] += o inv = defaultdict(list)", "| (~x & ~y & z) >>> simplify_logic(b) ~x &", "+ 1, len(rel)): rj, cj = rel[j][:2] if cj ==", "Nor(True, False) false >>> Nor(True, True) false >>> Nor(False, True)", "Negation Normal Form (NNF) if it contains only And, Or", "logical operators. Examples ======== >>> eliminate_implications(Implies(a, b)) b | ~a", "that gives the desired outcome. If there are inputs that", "simplify=simplify) if func == Xor: result = [] for i", "else false class BooleanFunction(Application, Boolean): \"\"\"Boolean function is a function", "return LatticeOp._new_args_filter(newargs, Or) def as_set(self): \"\"\" Rewrite logic operators and", "different things if ``a`` and ``b`` are integers. 
>>> And(x,", "``a ^ b`` and ``Xor(a, b)`` will be different if", "if len(temporary) == 1: if temporary[0] not in essential: essential.append(temporary[0])", "(form is None and len(truthtable) >= (2 ** (len(variables) -", "if a == true: return b elif a == false:", "import satisfiable other = sympify(other) if self.has(Relational) or other.has(Relational): raise", "elif b == true and c == false: return a", "argset.remove(a) argset.remove(b) argset.add(True) if len(argset) <= 1: return true if", "is bitwise not. In particular, ``~a`` and ``Not(a)`` will be", "0, evaluate=False).as_set() (-oo, 0] \"\"\" if len(self.free_symbols) == 1: return", "minterms is also in dontcares') old = None new =", "False otherwise. Examples ======== >>> Equivalent(False, False, False) true >>>", "if a binary term is satisfied by the given term.", "d[ai.args[0]][3] += 1 d[ai.args[0]][-1] += o inv = defaultdict(list) for", "a.is_Symbol: d[a][0] += 1 elif a.is_Not: d[a.args[0]][1] += 1 else:", "True, the expr is evaluated to its simplest CNF form.", "and c == false: return a elif b == false", "..utilities import ordered class Boolean(Expr): \"\"\"A boolean object is an", "\"\"\" symbols = dict(zip(symbols, range(1, len(symbols) + 1))) def append_symbol(arg,", "True is True``.\" While \"``true is True``\" is ``False``, \"``true", "i in range(0, len(self.args)+1, 2): for neg in combinations(self.args, i):", "equivalent sentence in CNF. Examples ======== >>> distribute_and_over_or(Or(a, And(Not(b), Not(c))))", "...). If simplify is True, the expr is evaluated to", "we have to if is_cnf(expr): return expr expr = eliminate_implications(expr)", "a list of the disjuncts in the sentence s. Examples", "c in rel: continue nc = (~c).canonical if any(r ==", "~b) | (b & c) | (~b & c), True)", "False if not isinstance(expr, function1): return False for cls in", "not a symbolic operation in Diofant, since it always returns", "return different things if ``a`` and ``b`` are integers. 
>>>", "0}) y \"\"\" zero = true identity = false @classmethod", "('cnf' or 'dnf') or None (default). If 'cnf' or 'dnf',", "automatically be converted to ``true`` when necessary, similar to how", "checks if function1.__class__ != function2.__class__: return if len(function1.args) != len(function2.args):", "[_ for _ in todo if _ is not None]])", "act bitwise on 1. Functions in the logic module will", "contains no redundant clauses. Examples ======== >>> is_nnf(a & b", "if form == 'dnf' or \\ (form is None and", "false and c == true: return Not(a) def to_nnf(self, simplify=True):", "a truth value of False. To avoid this issue, use", "Or): if simplified: args = expr.args for arg in args:", "[simplify(v) for v in variables] if form == 'dnf' or", "== false: return Or(Not(A), B) elif A == B: return", "expr expr = eliminate_implications(expr) return distribute_and_over_or(expr) def to_dnf(expr, simplify=False): \"\"\"", "ValueError(f'Illegal operator {func} in expression') class Xor(BooleanFunction): \"\"\" Logical XOR", "for i in range(1, len(args)+1, 2): for neg in combinations(args,", "& ~z) >>> simplify_logic(_) ~x & ~y \"\"\" if form", "if there is any doubt over whether a function or", "Returns False if A is True and B is False.", "is the Diofant version of True, for use in the", "in :py:class:`~diofant.logic.boolalg.BooleanTrue`. Examples ======== >>> sympify(False) false >>> false >>", "def match(function1, function2): \"\"\"Return the mapping that equates variables between", "Basic.match is not robust enough (see issue sympy/sympy#4835) so this", "relationals in terms of real sets. 
Examples ======== >>> Not(x", "-1 return index def _convert_to_varsSOP(minterm, variables): \"\"\" Converts a term", "False, True, False) false >>> x ^ y Xor(x, y)", "b)) b | ~a >>> eliminate_implications(Equivalent(a, b)) (a | ~b)", ">>> is_nnf((a >> b) & (b >> a)) False \"\"\"", "essential = _rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x, variables) for x in", "Test whether or not an expression is in disjunctive normal", "def eval(cls, arg): from ..core import (Equality, GreaterThan, LessThan, StrictGreaterThan,", "And._to_nnf(*result, simplify=simplify) if func == ITE: a, b, c =", "matchdict if matchdict else None a = simplify_logic(bool1) b =", "via ``false``. This is the Diofant version of False, for", "xor. In particular, ``a ^ b`` and ``Xor(a, b)`` will", "find logical predicates in BooleanFunctions. A logical predicate is defined", "The POSform function uses simplified_pairs and a redundant-group eliminating algorithm", "len(self.free_symbols) == 1: return Intersection(*[arg.as_set() for arg in self.args]) else:", "args], simplify=simplify) if func == Or: return And._to_nnf(*[~arg for arg", "= _rem_redundancy(new, minterms) return Or(*[_convert_to_varsSOP(x, variables) for x in essential])", "~y) | (~x & ~y & ~z) >>> simplify_logic(_) ~x", "workaround that is valid for simplified boolean expressions. \"\"\" #", "they are canonical. Here, ``(t, z)`` could be ``(a, d)``", "question can be replaced by an arbitrary symbolic ``Boolean``, like", "super().__new__(cls, *args, **kwargs) for arg in super(Xor, obj).args: if isinstance(arg,", "expr.args: if cls.is_Atom: continue if isinstance(cls, Not): if not cls.args[0].is_Atom:", "& (a | b) & (~a | ~b), {a: a,", "class for And, Or, Not, etc. 
\"\"\" is_Boolean = True", "been sufficiently simplified, use the prime implicant table method to", "0}) x \"\"\" def __new__(cls, *args, **kwargs): argset = set()", "for arg in args: if not is_literal(arg): arg = arg.to_nnf(simplify)", "satisfying the above rule of thumb, the assumptions system uses", "~a | (b & ~a)) \"\"\" expr = sympify(expr) if", "bool_map(eq1, eq2) ((x & y) | (t & ~y) |", "arg else true if arg.is_Not: return arg.args[0] # Simplify Relational", "is: \"If the boolean in question can be replaced by", "And._to_nnf(*[~arg for arg in args], simplify=simplify) if func == Implies:", "b) == frozenset([a, b]) True >>> conjuncts(a | b) ==", "BooleanFunction itself. \"\"\" if not isinstance(expr, BooleanFunction): return {expr} return", "import S from ..core.singleton import SingletonWithManagedProperties as Singleton from ..core.sympify", "is in Negation Normal Form. A logical expression is in", "_eval_simplify(self, ratio, measure): return simplify_logic(self) def to_nnf(self, simplify=True): return self._to_nnf(*self.args,", "Atom, cacheit from ..core.expr import Expr from ..core.function import Application", "of minterms, if possible, to a simplified set of minterms", "\"\"\" def match(function1, function2): \"\"\"Return the mapping that equates variables", "in the corresponding normal form is returned; if None, the", "GreaterThan(*arg.args) if isinstance(arg, StrictGreaterThan): return LessThan(*arg.args) if isinstance(arg, LessThan): return", "r in rel): return [true] rel.append(c) newargs.append(x) return LatticeOp._new_args_filter(newargs, Or)", "for r in argset: if isinstance(r, Relational): rel.append((r, r.canonical, (~r).canonical))", "[[1, 0, 1], [0, 0, 1]]) >>> function2 = SOPform([a,", "arg, for a in arg: if Not(a) in argset: return", "__rrshift__ = __lshift__ __rlshift__ = __rshift__ def __xor__(self, other): return", "generally use 1 instead of ``Integer(1)``. The rule of thumb", "be equal they must have the same literals. 
Examples ========", "no redundant clauses. Examples ======== >>> is_nnf(a & b |", "(y & z) | (z & ~t) References ========== *", "3): [a, b], (0, 0, 1, 2, 8): [y]} So", "uses simplified_pairs and a redundant-group eliminating algorithm to convert the", "Not(And(a, Not(a))).equals(Or(b, Not(b))) False \"\"\" from ..core.relational import Relational from", "self.atoms() == other.atoms() and \\ not satisfiable(Not(Equivalent(self, other))) class BooleanAtom(Atom,", "clauses. Examples ======== >>> to_nnf(Not((~a & ~b) | (c &", "= true nargs = None @classmethod def _new_args_filter(cls, args): newargs", "a binary term is satisfied by the given term. Used", "~a >>> eliminate_implications(Equivalent(a, b)) (a | ~b) & (b |", "_args = frozenset(argset) obj = super().__new__(cls, _args) obj._argset = _args", "``(t, z)`` could be ``(a, d)`` or ``(d, a)``: >>>", "Diofant version of False, for use in the logic module.", "len(temporary) == 1: if temporary[0] not in essential: essential.append(temporary[0]) for", "~ and >> will work as expected on this class,", "The result will be one of the (perhaps many) functions", "For two formulas to be equal they must have the", "is an integer. Furthermore, since bools in Python subclass from", "true >>> Nand(True, True) false >>> Nand(x, y) ~(x &", "return False if not isinstance(expr, function1): return False for cls", "false >>> Equivalent(x, And(x, True)) true \"\"\" def __new__(cls, *args,", "conjuncts(expr): \"\"\"Return a list of the conjuncts in the expr", "= tuple(ordered(argset)) obj._argset = frozenset(argset) return obj @property # type:", "s, but has only &, |, and ~ as logical", "======== >>> to_cnf(~(a | b) | c) (c | ~a)", "def _rem_redundancy(l1, terms): \"\"\" After the truth table has been", "SOPform([x, z, y], [[1, 0, 1], [0, 0, 1]]) >>>", "= false @classmethod def _new_args_filter(cls, args): newargs = [] rel", "that ``sympify(True)`` returns ``true``. 
This means that for the most", "~odd break elif cj == c: break else: continue remove.append((r,", "s. Examples ======== >>> conjuncts(a & b) == frozenset([a, b])", "x: b, y: c, z: d}) >>> eq = And(Xor(a,", "function (i.e., the \"sum of products\" or \"SOP\" form) that", "``-2``, which has a boolean value of True. To avoid", "~x & ~y) | (~x & ~y & ~z) >>>", "simplify_logic(expr, 'cnf', True) # Don't convert unless we have to", "True iff A and B are both True or both", "repeat=len(variables)): t = list(t) if expr.xreplace(dict(zip(variables, t))): truthtable.append(t) if deep:", "| ~a) & (c | ~b) \"\"\" return to_nnf(expr) def", "the other hand, ``==`` is not a symbolic operation in", "here is different from their normal use in Python, which", "and disjunctions of literals, return an equivalent sentence in DNF.", "if index != -1: todo[i] = todo[j_i + i +", "dictionary if possible matchdict = {} for k in f1:", "minterms) into the smallest Product of Sums form. The variables", "is also in dontcares') maxterms = [] for t in", "simplify_logic(bool2) m = match(a, b) if m: return a, m", "variables between two simplified boolean expressions if possible. By \"simplified\"", "argset = set() obj = super().__new__(cls, *args, **kwargs) for arg", "essential = _rem_redundancy(new, minterms) return Or(*[_convert_to_varsSOP(x, variables) for x in", "or B == false: return Or(Not(A), B) elif A ==", "maxterms) return And(*[_convert_to_varsPOS(x, variables) for x in essential]) def _find_predicates(expr):", "false >>> Not(And(And(True, x), Or(x, False))) ~x >>> ~x ~x", "(y & ~z, {y: a, z: b}) The results are", "is an Or or And object in Diofant. Parameters ==========", "with which it appeared, counting Symbol as 1 and Not(Symbol)", "boolean value of True. To avoid this issue, use the", "minterms: raise ValueError(f'{d} in minterms is also in dontcares') maxterms", "``if x is True``. To quote PEP 8: Don't compare", "binary to it's variable form (for SOP). 
\"\"\" temp =", "(c & d & (a | b) & (~a |", "if isinstance(arg, cls): arg = arg.args else: arg = arg,", "B == true or B == false: return Or(Not(A), B)", "``true``. This means that for the most part, you can", "~c), Or(~a, ~b), simplify=simplify) raise ValueError(f'Illegal operator {func} in expression')", "On the other hand, ``==`` is not a symbolic operation", "the expr is evaluated to its simplest CNF form. Examples", "function has been denested and is either an And (or", "false \"\"\" is_Not = True @classmethod def eval(cls, arg): from", "isinstance(arg, Number) or arg in (True, False): if not arg:", "a, b in remove: argset.remove(a) argset.remove(b) argset.add(True) if len(argset) <=", "# of times it appeared as a Symbol in an", "arg in self.args]) else: raise NotImplementedError('Sorry, Or.as_set has not yet", "return False for cls in expr.args: if cls.is_Atom: continue if", "[0, 1, 0, 1]] >>> POSform([t, x, y, z], minterms,", "if expr.is_Atom: return True # Special case of a single", "~z, {y: a, z: b}) The results are not necessarily", "in enumerate(f1[k]): matchdict[x] = f2[k][i] return matchdict if matchdict else", "Examples ======== >>> eliminate_implications(Implies(a, b)) b | ~a >>> eliminate_implications(Equivalent(a,", "(~x & ~y & z) >>> simplify_logic(b) ~x & ~y", "arguments in order, giving False immediately if any of them", "in Negation Normal Form (NNF) if it contains only And,", "and b do not. \"\"\" f = eq.free_symbols d =", "term is satisfied by the given term. 
Used for recognizing", "__xor__(self, other): return Xor(self, other) __rxor__ = __xor__ def equals(self,", "implemented for mutivariate' ' expressions') def to_nnf(self, simplify=True): if is_literal(self):", "return essential def SOPform(variables, minterms, dontcares=None): \"\"\" The SOPform function", "primary advantage of using true instead of True is that", "y Implies(x, y) >>> y << x Implies(x, y) Notes", "0 -> False remove = [] for i, (r, c,", "[] for x in reversed(list(args)): if isinstance(x, Number) or x", "1], [0, 0, 1]]) >>> function2 = SOPform([a, b, c],", "b) & (b >> a)) False \"\"\" expr = sympify(expr)", "the form ((A | ~B | ...) & (B |", "1]]) >>> function2 = SOPform([a, b, c], [[1, 0, 1],", "Examples ======== >>> b = (~x & ~y & ~z)", "temporary[0] not in essential: essential.append(temporary[0]) for x in terms: for", "i): clause = [~s if s in neg else s", "y, z], minterms, dontcares) (y & z) | (z &", "__ror__ = __or__ def __invert__(self): \"\"\"Overloading for ~ operator.\"\"\" return", "set of minterms, if possible, to a simplified set of", "and ``Not(a)`` will be different if ``a`` is an integer.", "0, 1], [1, 0, 0]]) >>> bool_map(function1, function2) (y &", ">>> conjuncts(a & b) == frozenset([a, b]) True >>> conjuncts(a", "return this class when they evaluate to true. Notes =====", "args return And._to_nnf(a, ~b, simplify=simplify) if func == Equivalent: return", "OR) function. Returns True if an odd number of the", "operator {func} in expression') class Xor(BooleanFunction): \"\"\" Logical XOR (exclusive", "minterms, dontcares) z & (y | ~t) References ========== *", "other) def __lshift__(self, other): \"\"\"Overloading for << operator.\"\"\" return Implies(other,", "used as base class for And, Or, Not, etc. 
\"\"\"", "expr.func, expr.args if func == And: return Or._to_nnf(*[~arg for arg", "else: raise NotImplementedError('Sorry, And.as_set has not yet been' ' implemented", "===== The ``&`` operator is provided as a convenience, but", "b, c = args return And._to_nnf(Or(a, ~c), Or(~a, ~b), simplify=simplify)", "form == 'dnf' or form is None: expr = sympify(expr)", "(z & ~x & ~y) | (~x & ~y &", "disjunctions of literals, return an equivalent sentence in CNF. Examples", "simplify=True): if is_literal(self): return self expr = self.args[0] func, args", "And(Not(b), Not(c)))) (a | ~b) & (a | ~c) \"\"\"", "(t not in dontcares): maxterms.append(t) old = None new =", "if isinstance(lit, Not): if not lit.args[0].is_Atom: return False else: if", "Also ======== BooleanTrue \"\"\" def __bool__(self): return False def __hash__(self):", "= frozenset(argset) return obj @property # type: ignore[misc] @cacheit def", ">>> dontcares = [[0, 0, 0, 0], [0, 0, 1,", "return true elif (~A).canonical == B.canonical: return B else: return", "~b), simplify=simplify) raise ValueError(f'Illegal operator {func} in expression') class Xor(BooleanFunction):", "0, 1, 2, 8): [y]} So y and x have", "result = [] for i in range(1, len(args)+1, 2): for", ">>> eq = Or(And(Not(y), a), And(Not(y), b), And(x, y)) >>>", "False if all arguments are True. 
Examples ======== >>> Nand(False,", "0, 0]]) >>> bool_map(function1, function2) (y & ~z, {y: a,", "else: for z in l1: # pragma: no branch if", "~c) \"\"\" return _distribute((expr, And, Or)) def distribute_or_over_and(expr): \"\"\" Given", "expr variables = _find_predicates(expr) truthtable = [] for t in", "1))): return SOPform(variables, truthtable) elif form == 'cnf' or form", "x | y x | y Notes ===== The ``|``", "system uses a three-valued logic (``True``, ``False``, ``None``), whereas ``true``", "temp.append(variables[i]) return And(*temp) def _convert_to_varsPOS(maxterm, variables): \"\"\" Converts a term", "of function2 if isinstance(expr, function2): for lit in expr.args: if", "if the given formulas have the same truth table. For", "[0, 1, 1, 1], [1, 0, 1, 1], [1, 1,", "y) x >>> ITE(False, x, y) y >>> ITE(x, y,", "B: return true elif A.is_Relational and B.is_Relational: if A.canonical ==", "than mathematical, so it should return ``True``. The assumptions system", "or not an expression is in conjunctive normal form. Examples", "A implies B is equivalent to !A v B Accepts", "deep : boolean (default True) indicates whether to recursively simplify", "\"\"\" Logical NOR function. It evaluates its arguments in order,", "2): [x], (0, 0, 1, 0, 3): [a, b], (0,", "index == -1: index = x else: return -1 return", "relationals in terms of real sets. Examples ======== >>> And(x" ]
[ "and int(each_count) > 0: if int(each_count) % 10 == 0:", "goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin, each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if each_asin and", "each_view in view_con: # 评价人 view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw =", "view_date, view_colour, view_size, view_body, view_useful] self.view_list.append(each_view_list) # print(self.view_list[-1]) def run(self,", "goods_data['goods_review_count'][5:50]): if each_asin and int(each_count) > 0: if int(each_count) %", "self.s.get(url, headers=self.headers, proxies=self.proxies) if res.status_code != 200: print(\"请求出错,状态码为:%s\" % res.status_code)", "')[0] if view_useful == 'one': view_useful = 1 try: view_useful", "for page in range(1, end_page): if page == 1: url", "self.url_queue.empty()] for each in review_threads: each.start() print(\"队列剩余数量\", self.url_queue.qsize()) for each", "view_useful] self.view_list.append(each_view_list) # print(self.view_list[-1]) def run(self, data): goods_data = pd.read_excel(data,", "zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if each_asin and int(each_count) > 0: if int(each_count)", "view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful = view_useful_raw.split(' ')[0] if view_useful ==", "review aok-relative\"]') for each_view in view_con: # 评价人 view_name =", "each.start() print(\"队列剩余数量\", self.url_queue.qsize()) for each in review_threads: each.join() except: print(\"请求链接出错,重试中...\")", "= each.split(\":\")[1].strip() except: pass # 评价内容 view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') #", "self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies) def get_review(self, url): res = self.s.get(url, headers=self.headers,", "Queue import threading class Review: headers = { \"User-Agent\": \"Mozilla/5.0", "in review_threads: 
each.join() except: print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty(): break", "view_size = each.split(\":\")[1].strip() except: pass # 评价内容 view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)')", "\"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin, each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if", "pandas as pd import requests from lxml import etree import", "self.s = requests.Session() self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies) def get_review(self, url): res", "each.split(\":\")[1].strip() except: pass # 评价内容 view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量", "int(each_count) > 0: if int(each_count) % 10 == 0: end_page", "<gh_stars>1-10 import pandas as pd import requests from lxml import", "# 评价星级 view_star = view_star_raw.split(' ')[0] # 评价title view_title =", "each.join() except: print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty(): break view_goods_pd =", "in view_format: if re.search(\"color|colour|色\", each, re.I): view_colour = each.split(':')[1].strip() if", "pd.read_excel(data, encoding='utf-8') base_url = self.row_url + \"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for", "+ each_asin + '?pageNumber=' + str(page) self.url_queue.put(url) print(\"review_page_%d\" % page,", "proxies=self.proxies) if res.status_code != 200: print(\"请求出错,状态码为:%s\" % res.status_code) print(res.text) return", "res = self.s.get(url, headers=self.headers, proxies=self.proxies) if res.status_code != 200: print(\"请求出错,状态码为:%s\"", "AppleWebKit/537.36 \\ (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36\" } proxies =", "# 评价人 view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw = 
each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级", "end_page = int(each_count) // 10 + 1 else: end_page =", "\"http\": \"http://172.16.17.32:9999\", } def __init__(self, domain): self.view_list = [] self.page_list", "\"reviews_\" + aft + \".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list))", "import threading class Review: headers = { \"User-Agent\": \"Mozilla/5.0 (Windows", "view_star_raw = each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级 view_star = view_star_raw.split(' ')[0] #", "queue import Queue import threading class Review: headers = {", "'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True) aft = datetime.datetime.now().strftime('%m%d%H%M') file_name =", "0 # 商品的评价信息表 each_view_list = [view_goods, view_name, view_star, view_title, view_date,", "# 商品评价名称 view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器 view_con = res_html.xpath('//div[@class=\"a-section", "each_asin else: url = base_url + each_asin + '?pageNumber=' +", "try: for each in view_format: if re.search(\"color|colour|色\", each, re.I): view_colour", "headers=self.headers, proxies=self.proxies) if res.status_code != 200: print(\"请求出错,状态码为:%s\" % res.status_code) print(res.text)", "class Review: headers = { \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0;", "print(\"请求出错,状态码为:%s\" % res.status_code) print(res.text) return res_html = etree.HTML(res.text) # 商品评价名称", "# print(self.view_list[-1]) def run(self, data): goods_data = pd.read_excel(data, encoding='utf-8') base_url", "= requests.Session() self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies) def get_review(self, url): res =", "each.split(':')[1].strip() if re.search(\"size|style|サイズ\", each, re.I): view_size = each.split(\":\")[1].strip() except: pass", "if 
re.search(\"color|colour|色\", each, re.I): view_colour = each.split(':')[1].strip() if re.search(\"size|style|サイズ\", each,", "= view_star_raw.split(' ')[0] # 评价title view_title = each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期", "__init__(self, domain): self.view_list = [] self.page_list = [] self.url_queue =", "= view_useful_raw.split(' ')[0] if view_useful == 'one': view_useful = 1", "评价有用数量 try: view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful = view_useful_raw.split(' ')[0] if", "if re.search(\"size|style|サイズ\", each, re.I): view_size = each.split(\":\")[1].strip() except: pass #", "\".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list)) if __name__ == '__main__':", "# 评价日期 view_date = each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour =", "def get_review(self, url): res = self.s.get(url, headers=self.headers, proxies=self.proxies) if res.status_code", "x64) AppleWebKit/537.36 \\ (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36\" } proxies", "for each in view_format: if re.search(\"color|colour|色\", each, re.I): view_colour =", "== 'com': self.row_url = \"https://www.amazon.com\" self.s = requests.Session() self.s.get(url=self.row_url, headers=self.headers,", "= [] self.url_queue = Queue() if domain.strip().lower() == 'jp': self.row_url", "'review_title', 'review_date', 'review_colour', 'review_size', 'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True) aft", "pass time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty(): break view_goods_pd = pd.DataFrame(self.view_list, columns=['review_goods', 'review_name',", "\"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 \\ (KHTML, like", "% res.status_code) print(res.text) 
return res_html = etree.HTML(res.text) # 商品评价名称 view_goods", "None view_size = None try: for each in view_format: if", "= \"https://www.amazon.co.jp\" elif domain.strip().lower == 'com': self.row_url = \"https://www.amazon.com\" self.s", "view_star_raw.split(' ')[0] # 评价title view_title = each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期 view_date", "try: review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for m in range(30) if", "while True: try: review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for m in", "(KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36\" } proxies = { \"http\":", "商品评价容器 view_con = res_html.xpath('//div[@class=\"a-section review aok-relative\"]') for each_view in view_con:", "None try: for each in view_format: if re.search(\"color|colour|色\", each, re.I):", "view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list)) if __name__ == '__main__': data", "!= 200: print(\"请求出错,状态码为:%s\" % res.status_code) print(res.text) return res_html = etree.HTML(res.text)", "== 'jp': self.row_url = \"https://www.amazon.co.jp\" elif domain.strip().lower == 'com': self.row_url", "each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期 view_date = each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour", "1 try: view_useful = int(view_useful) except: pass except: view_useful =", "each in review_threads: each.start() print(\"队列剩余数量\", self.url_queue.qsize()) for each in review_threads:", "aft = datetime.datetime.now().strftime('%m%d%H%M') file_name = r'../data/goods_review/' + \"reviews_\" + aft", "m in range(30) if not self.url_queue.empty()] for each in review_threads:", "view_body, view_useful] self.view_list.append(each_view_list) # print(self.view_list[-1]) def run(self, data): goods_data =", 
"= each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量 try: view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful =", "not self.url_queue.empty()] for each in review_threads: each.start() print(\"队列剩余数量\", self.url_queue.qsize()) for", "import pandas as pd import requests from lxml import etree", "base_url = self.row_url + \"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin, each_count", "评价内容 view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量 try: view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0]", "review_threads: each.join() except: print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty(): break view_goods_pd", "etree import re, time, random, datetime from queue import Queue", "review_threads: each.start() print(\"队列剩余数量\", self.url_queue.qsize()) for each in review_threads: each.join() except:", "Gecko) Chrome/69.0.3497.81 Safari/537.36\" } proxies = { \"http\": \"http://172.16.17.32:9999\", }", "2 for page in range(1, end_page): if page == 1:", "'review_colour', 'review_size', 'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True) aft = datetime.datetime.now().strftime('%m%d%H%M')", "+ \".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list)) if __name__ ==", "headers = { \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "goods_data = pd.read_excel(data, encoding='utf-8') base_url = self.row_url + \"/product-reviews/\" #", "args=(self.url_queue.get(),)) for m in range(30) if not self.url_queue.empty()] for each", "each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour = None 
view_size = None", "= res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器 view_con = res_html.xpath('//div[@class=\"a-section review aok-relative\"]') for", "__name__ == '__main__': data = r\"../data/category/Kid's Weighted Blankets_08_28_13_22.xlsx\" review =", "{ \"http\": \"http://172.16.17.32:9999\", } def __init__(self, domain): self.view_list = []", "lxml import etree import re, time, random, datetime from queue", "import re, time, random, datetime from queue import Queue import", "print(\"review_page_%d\" % page, url) time.sleep(1.5) while True: try: review_threads =", "domain.strip().lower == 'com': self.row_url = \"https://www.amazon.com\" self.s = requests.Session() self.s.get(url=self.row_url,", "Queue() if domain.strip().lower() == 'jp': self.row_url = \"https://www.amazon.co.jp\" elif domain.strip().lower", "for each_asin, each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if each_asin and int(each_count)", "view_size = None try: for each in view_format: if re.search(\"color|colour|色\",", "print(\"共获取评论数量:\", len(self.view_list)) if __name__ == '__main__': data = r\"../data/category/Kid's Weighted", "aok-relative\"]') for each_view in view_con: # 评价人 view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0]", "+ 1 else: end_page = int(each_count) // 10 + 2", "import Queue import threading class Review: headers = { \"User-Agent\":", "view_star, view_title, view_date, view_colour, view_size, view_body, view_useful] self.view_list.append(each_view_list) # print(self.view_list[-1])", "return res_html = etree.HTML(res.text) # 商品评价名称 view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] #", "base_url + each_asin + '?pageNumber=' + str(page) self.url_queue.put(url) print(\"review_page_%d\" %", "10 + 1 else: end_page = int(each_count) // 10 +", "[] self.url_queue = Queue() if domain.strip().lower() == 'jp': self.row_url =", 
"each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour = None view_size = None try: for each", "+ 2 for page in range(1, end_page): if page ==", "= [threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for m in range(30) if not self.url_queue.empty()]", "data): goods_data = pd.read_excel(data, encoding='utf-8') base_url = self.row_url + \"/product-reviews/\"", "range(30) if not self.url_queue.empty()] for each in review_threads: each.start() print(\"队列剩余数量\",", "view_useful = int(view_useful) except: pass except: view_useful = 0 #", "each, re.I): view_size = each.split(\":\")[1].strip() except: pass # 评价内容 view_body", "view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour = None view_size = None try:", "+ \"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin, each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]):", "10 == 0: end_page = int(each_count) // 10 + 1", "random, datetime from queue import Queue import threading class Review:", "= each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级 view_star = view_star_raw.split(' ')[0] # 评价title", "= each.split(':')[1].strip() if re.search(\"size|style|サイズ\", each, re.I): view_size = each.split(\":\")[1].strip() except:", "end_page = int(each_count) // 10 + 2 for page in", "view_useful == 'one': view_useful = 1 try: view_useful = int(view_useful)", "== 1: url = base_url + each_asin else: url =", "view_useful = view_useful_raw.split(' ')[0] if view_useful == 'one': view_useful =", "== '__main__': data = r\"../data/category/Kid's Weighted Blankets_08_28_13_22.xlsx\" review = Review(domain='com')", "else: end_page = int(each_count) // 10 + 2 for page", "elif domain.strip().lower == 'com': self.row_url = \"https://www.amazon.com\" self.s = requests.Session()", "10 + 2 for page in range(1, end_page): if page", "+ str(page) self.url_queue.put(url) 
print(\"review_page_%d\" % page, url) time.sleep(1.5) while True:", "% page, url) time.sleep(1.5) while True: try: review_threads = [threading.Thread(target=self.get_review,", "view_size, view_body, view_useful] self.view_list.append(each_view_list) # print(self.view_list[-1]) def run(self, data): goods_data", "len(self.view_list)) if __name__ == '__main__': data = r\"../data/category/Kid's Weighted Blankets_08_28_13_22.xlsx\"", "= self.s.get(url, headers=self.headers, proxies=self.proxies) if res.status_code != 200: print(\"请求出错,状态码为:%s\" %", "inplace=True) aft = datetime.datetime.now().strftime('%m%d%H%M') file_name = r'../data/goods_review/' + \"reviews_\" +", "review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for m in range(30) if not", "view_star = view_star_raw.split(' ')[0] # 评价title view_title = each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] #", "view_title, view_date, view_colour, view_size, view_body, view_useful] self.view_list.append(each_view_list) # print(self.view_list[-1]) def", "pd.DataFrame(self.view_list, columns=['review_goods', 'review_name', 'review_star', 'review_title', 'review_date', 'review_colour', 'review_size', 'review_body', 'review_useful'])", "view_useful_raw.split(' ')[0] if view_useful == 'one': view_useful = 1 try:", "view_con: # 评价人 view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw = each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] #", "= 0 # 商品的评价信息表 each_view_list = [view_goods, view_name, view_star, view_title,", "datetime.datetime.now().strftime('%m%d%H%M') file_name = r'../data/goods_review/' + \"reviews_\" + aft + \".xlsx\"", "view_colour = each.split(':')[1].strip() if re.search(\"size|style|サイズ\", each, re.I): view_size = each.split(\":\")[1].strip()", "} proxies = { \"http\": \"http://172.16.17.32:9999\", } def __init__(self, domain):", "view_useful = 1 try: view_useful = int(view_useful) 
except: pass except:", "from queue import Queue import threading class Review: headers =", "'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True) aft = datetime.datetime.now().strftime('%m%d%H%M') file_name = r'../data/goods_review/'", "for each_view in view_con: # 评价人 view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw", "view_name, view_star, view_title, view_date, view_colour, view_size, view_body, view_useful] self.view_list.append(each_view_list) #", "= r'../data/goods_review/' + \"reviews_\" + aft + \".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8',", "def __init__(self, domain): self.view_list = [] self.page_list = [] self.url_queue", "domain.strip().lower() == 'jp': self.row_url = \"https://www.amazon.co.jp\" elif domain.strip().lower == 'com':", "[threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for m in range(30) if not self.url_queue.empty()] for", "商品评价名称 view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器 view_con = res_html.xpath('//div[@class=\"a-section review", "time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty(): break view_goods_pd = pd.DataFrame(self.view_list, columns=['review_goods', 'review_name', 'review_star',", "page, url) time.sleep(1.5) while True: try: review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),))", "# goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin, each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if each_asin", "= None try: for each in view_format: if re.search(\"color|colour|色\", each,", "if self.url_queue.empty(): break view_goods_pd = pd.DataFrame(self.view_list, columns=['review_goods', 'review_name', 'review_star', 'review_title',", "self.url_queue = Queue() if domain.strip().lower() == 'jp': self.row_url = \"https://www.amazon.co.jp\"", 
"re.search(\"size|style|サイズ\", each, re.I): view_size = each.split(\":\")[1].strip() except: pass # 评价内容", "url) time.sleep(1.5) while True: try: review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for", "r'../data/goods_review/' + \"reviews_\" + aft + \".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter')", "if page == 1: url = base_url + each_asin else:", "each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if each_asin and int(each_count) > 0:", "view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw = each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级 view_star =", "self.view_list = [] self.page_list = [] self.url_queue = Queue() if", "评价人 view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw = each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级 view_star", "time, random, datetime from queue import Queue import threading class", "if not self.url_queue.empty()] for each in review_threads: each.start() print(\"队列剩余数量\", self.url_queue.qsize())", "for m in range(30) if not self.url_queue.empty()] for each in", "= 1 try: view_useful = int(view_useful) except: pass except: view_useful", "columns=['review_goods', 'review_name', 'review_star', 'review_title', 'review_date', 'review_colour', 'review_size', 'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name',", "if view_useful == 'one': view_useful = 1 try: view_useful =", "# 评价title view_title = each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期 view_date = each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0]", "print(\"队列剩余数量\", self.url_queue.qsize()) for each in review_threads: each.join() except: print(\"请求链接出错,重试中...\") pass", "requests from lxml import etree import re, time, random, datetime", "except: 
print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty(): break view_goods_pd = pd.DataFrame(self.view_list,", "+ aft + \".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list)) if", "= res_html.xpath('//div[@class=\"a-section review aok-relative\"]') for each_view in view_con: # 评价人", "= None view_size = None try: for each in view_format:", "for each in review_threads: each.join() except: print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1)) if", "+ each_asin else: url = base_url + each_asin + '?pageNumber='", "encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list)) if __name__ == '__main__': data =", "# 评价有用数量 try: view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful = view_useful_raw.split(' ')[0]", "domain): self.view_list = [] self.page_list = [] self.url_queue = Queue()", "etree.HTML(res.text) # 商品评价名称 view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器 view_con =", "= base_url + each_asin + '?pageNumber=' + str(page) self.url_queue.put(url) print(\"review_page_%d\"", "== 0: end_page = int(each_count) // 10 + 1 else:", "if each_asin and int(each_count) > 0: if int(each_count) % 10", "except: view_useful = 0 # 商品的评价信息表 each_view_list = [view_goods, view_name,", "= int(view_useful) except: pass except: view_useful = 0 # 商品的评价信息表", "// 10 + 1 else: end_page = int(each_count) // 10", "'review_star', 'review_title', 'review_date', 'review_colour', 'review_size', 'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True)", "proxies = { \"http\": \"http://172.16.17.32:9999\", } def __init__(self, domain): self.view_list", "[view_goods, view_name, view_star, view_title, view_date, view_colour, view_size, view_body, view_useful] self.view_list.append(each_view_list)", 
"res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器 view_con = res_html.xpath('//div[@class=\"a-section review aok-relative\"]') for each_view", "% 10 == 0: end_page = int(each_count) // 10 +", "Win64; x64) AppleWebKit/537.36 \\ (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36\" }", "each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量 try: view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful = view_useful_raw.split('", "print(res.text) return res_html = etree.HTML(res.text) # 商品评价名称 view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0]", "view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量 try: view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful", "time.sleep(1.5) while True: try: review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for m", "url = base_url + each_asin else: url = base_url +", "in range(1, end_page): if page == 1: url = base_url", "view_colour = None view_size = None try: for each in", "re.search(\"color|colour|色\", each, re.I): view_colour = each.split(':')[1].strip() if re.search(\"size|style|サイズ\", each, re.I):", "engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list)) if __name__ == '__main__': data = r\"../data/category/Kid's", "} def __init__(self, domain): self.view_list = [] self.page_list = []", "self.row_url = \"https://www.amazon.com\" self.s = requests.Session() self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies) def", "view_colour, view_size, view_body, view_useful] self.view_list.append(each_view_list) # print(self.view_list[-1]) def run(self, data):", "if __name__ == '__main__': data = r\"../data/category/Kid's Weighted Blankets_08_28_13_22.xlsx\" review", "'jp': self.row_url = \"https://www.amazon.co.jp\" elif domain.strip().lower == 'com': self.row_url =", "\\ 
(KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36\" } proxies = {", "Chrome/69.0.3497.81 Safari/537.36\" } proxies = { \"http\": \"http://172.16.17.32:9999\", } def", "view_title = each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期 view_date = each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format =", "in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if each_asin and int(each_count) > 0: if", "= Queue() if domain.strip().lower() == 'jp': self.row_url = \"https://www.amazon.co.jp\" elif", "threading class Review: headers = { \"User-Agent\": \"Mozilla/5.0 (Windows NT", "view_goods_pd = pd.DataFrame(self.view_list, columns=['review_goods', 'review_name', 'review_star', 'review_title', 'review_date', 'review_colour', 'review_size',", "Review: headers = { \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64;", "res_html.xpath('//div[@class=\"a-section review aok-relative\"]') for each_view in view_con: # 评价人 view_name", "= pd.read_excel(data, encoding='utf-8') base_url = self.row_url + \"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True)", "pass except: view_useful = 0 # 商品的评价信息表 each_view_list = [view_goods,", "each_asin, each_count in zip(goods_data['ASIN'][5:50], goods_data['goods_review_count'][5:50]): if each_asin and int(each_count) >", "as pd import requests from lxml import etree import re,", "each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw = each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级 view_star = view_star_raw.split(' ')[0]", "int(view_useful) except: pass except: view_useful = 0 # 商品的评价信息表 each_view_list", "headers=self.headers, proxies=self.proxies) def get_review(self, url): res = self.s.get(url, headers=self.headers, proxies=self.proxies)", "= each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour = None view_size = None try: for", "# 商品评价容器 view_con = 
res_html.xpath('//div[@class=\"a-section review aok-relative\"]') for each_view in", "+ \"reviews_\" + aft + \".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\",", "run(self, data): goods_data = pd.read_excel(data, encoding='utf-8') base_url = self.row_url +", "pass # 评价内容 view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量 try: view_useful_raw", "= \"https://www.amazon.com\" self.s = requests.Session() self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies) def get_review(self,", "re, time, random, datetime from queue import Queue import threading", "get_review(self, url): res = self.s.get(url, headers=self.headers, proxies=self.proxies) if res.status_code !=", "self.view_list.append(each_view_list) # print(self.view_list[-1]) def run(self, data): goods_data = pd.read_excel(data, encoding='utf-8')", "print(self.view_list[-1]) def run(self, data): goods_data = pd.read_excel(data, encoding='utf-8') base_url =", "// 10 + 2 for page in range(1, end_page): if", "int(each_count) // 10 + 2 for page in range(1, end_page):", "# 商品的评价信息表 each_view_list = [view_goods, view_name, view_star, view_title, view_date, view_colour,", "each in review_threads: each.join() except: print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty():", "'review_date','review_body'], inplace=True) aft = datetime.datetime.now().strftime('%m%d%H%M') file_name = r'../data/goods_review/' + \"reviews_\"", "except: pass # 评价内容 view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量 try:", "pd import requests from lxml import etree import re, time,", "int(each_count) % 10 == 0: end_page = int(each_count) // 10", "break view_goods_pd = pd.DataFrame(self.view_list, columns=['review_goods', 'review_name', 'review_star', 'review_title', 'review_date', 'review_colour',", "= { \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) 
AppleWebKit/537.36", "商品的评价信息表 each_view_list = [view_goods, view_name, view_star, view_title, view_date, view_colour, view_size,", "self.row_url = \"https://www.amazon.co.jp\" elif domain.strip().lower == 'com': self.row_url = \"https://www.amazon.com\"", "self.url_queue.qsize()) for each in review_threads: each.join() except: print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1))", "+ '?pageNumber=' + str(page) self.url_queue.put(url) print(\"review_page_%d\" % page, url) time.sleep(1.5)", "Safari/537.36\" } proxies = { \"http\": \"http://172.16.17.32:9999\", } def __init__(self,", "'review_name', 'review_star', 'review_title', 'review_date', 'review_colour', 'review_size', 'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'],", "like Gecko) Chrome/69.0.3497.81 Safari/537.36\" } proxies = { \"http\": \"http://172.16.17.32:9999\",", "aft + \".xlsx\" view_goods_pd.to_excel(file_name, encoding='utf-8', engine='xlsxwriter') print(\"共获取评论数量:\", len(self.view_list)) if __name__", "view_useful = 0 # 商品的评价信息表 each_view_list = [view_goods, view_name, view_star,", "each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful = view_useful_raw.split(' ')[0] if view_useful == 'one': view_useful", "for each in review_threads: each.start() print(\"队列剩余数量\", self.url_queue.qsize()) for each in", "if int(each_count) % 10 == 0: end_page = int(each_count) //", "评价星级 view_star = view_star_raw.split(' ')[0] # 评价title view_title = each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0]", "= { \"http\": \"http://172.16.17.32:9999\", } def __init__(self, domain): self.view_list =", "int(each_count) // 10 + 1 else: end_page = int(each_count) //", "end_page): if page == 1: url = base_url + each_asin", "view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器 view_con = res_html.xpath('//div[@class=\"a-section review aok-relative\"]')", "[] 
self.page_list = [] self.url_queue = Queue() if domain.strip().lower() ==", "self.url_queue.empty(): break view_goods_pd = pd.DataFrame(self.view_list, columns=['review_goods', 'review_name', 'review_star', 'review_title', 'review_date',", "(Windows NT 10.0; Win64; x64) AppleWebKit/537.36 \\ (KHTML, like Gecko)", "each_view_list = [view_goods, view_name, view_star, view_title, view_date, view_colour, view_size, view_body,", "\"http://172.16.17.32:9999\", } def __init__(self, domain): self.view_list = [] self.page_list =", "= int(each_count) // 10 + 2 for page in range(1,", "= pd.DataFrame(self.view_list, columns=['review_goods', 'review_name', 'review_star', 'review_title', 'review_date', 'review_colour', 'review_size', 'review_body',", "in range(30) if not self.url_queue.empty()] for each in review_threads: each.start()", "res.status_code) print(res.text) return res_html = etree.HTML(res.text) # 商品评价名称 view_goods =", "res_html = etree.HTML(res.text) # 商品评价名称 view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器", "self.page_list = [] self.url_queue = Queue() if domain.strip().lower() == 'jp':", "= self.row_url + \"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin, each_count in", "= each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour = None view_size =", "0: end_page = int(each_count) // 10 + 1 else: end_page", "re.I): view_size = each.split(\":\")[1].strip() except: pass # 评价内容 view_body =", "each_asin and int(each_count) > 0: if int(each_count) % 10 ==", "except: pass except: view_useful = 0 # 商品的评价信息表 each_view_list =", "= each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw = each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级 view_star = view_star_raw.split('", "\"https://www.amazon.com\" self.s = requests.Session() 
self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies) def get_review(self, url):", "re.I): view_colour = each.split(':')[1].strip() if re.search(\"size|style|サイズ\", each, re.I): view_size =", "if domain.strip().lower() == 'jp': self.row_url = \"https://www.amazon.co.jp\" elif domain.strip().lower ==", "self.row_url + \"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin, each_count in zip(goods_data['ASIN'][5:50],", "'review_size', 'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True) aft = datetime.datetime.now().strftime('%m%d%H%M') file_name", "import requests from lxml import etree import re, time, random,", "def run(self, data): goods_data = pd.read_excel(data, encoding='utf-8') base_url = self.row_url", "encoding='utf-8') base_url = self.row_url + \"/product-reviews/\" # goods_data.drop_duplicates(subset=['r','评价数量'],inplace=True) for each_asin,", "str(page) self.url_queue.put(url) print(\"review_page_%d\" % page, url) time.sleep(1.5) while True: try:", "res.status_code != 200: print(\"请求出错,状态码为:%s\" % res.status_code) print(res.text) return res_html =", "\"https://www.amazon.co.jp\" elif domain.strip().lower == 'com': self.row_url = \"https://www.amazon.com\" self.s =", "1: url = base_url + each_asin else: url = base_url", "in view_con: # 评价人 view_name = each_view.xpath('.//span[@class=\"a-profile-name\"]/text()')[0] view_star_raw = each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0]", "file_name = r'../data/goods_review/' + \"reviews_\" + aft + \".xlsx\" view_goods_pd.to_excel(file_name,", "view_date = each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour = None view_size", "== 'one': view_useful = 1 try: view_useful = int(view_useful) except:", "评价日期 view_date = 
each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()') view_colour = None", "base_url + each_asin else: url = base_url + each_asin +", "each in view_format: if re.search(\"color|colour|色\", each, re.I): view_colour = each.split(':')[1].strip()", "NT 10.0; Win64; x64) AppleWebKit/537.36 \\ (KHTML, like Gecko) Chrome/69.0.3497.81", "200: print(\"请求出错,状态码为:%s\" % res.status_code) print(res.text) return res_html = etree.HTML(res.text) #", "page in range(1, end_page): if page == 1: url =", "'__main__': data = r\"../data/category/Kid's Weighted Blankets_08_28_13_22.xlsx\" review = Review(domain='com') review.run(data=data)", "= each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期 view_date = each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format = each_view.xpath('.//a[@data-hook=\"format-strip\"]/text()')", "page == 1: url = base_url + each_asin else: url", "try: view_useful_raw = each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful = view_useful_raw.split(' ')[0] if view_useful", "= datetime.datetime.now().strftime('%m%d%H%M') file_name = r'../data/goods_review/' + \"reviews_\" + aft +", "= int(each_count) // 10 + 1 else: end_page = int(each_count)", "datetime from queue import Queue import threading class Review: headers", "proxies=self.proxies) def get_review(self, url): res = self.s.get(url, headers=self.headers, proxies=self.proxies) if", "view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True) aft = datetime.datetime.now().strftime('%m%d%H%M') file_name = r'../data/goods_review/' +", "view_con = res_html.xpath('//div[@class=\"a-section review aok-relative\"]') for each_view in view_con: #", "import etree import re, time, random, datetime from queue import", "0: if int(each_count) % 10 == 0: end_page = int(each_count)", "')[0] # 评价title view_title = 
each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期 view_date =", "= etree.HTML(res.text) # 商品评价名称 view_goods = res_html.xpath('//span[@class=\"a-list-item\"]/a/text()')[0] # 商品评价容器 view_con", "= [] self.page_list = [] self.url_queue = Queue() if domain.strip().lower()", "{ \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 \\", "each_view.xpath('.//div[@class=\"a-row\"]/a[@class=\"a-link-normal\"]/@title')[0] # 评价星级 view_star = view_star_raw.split(' ')[0] # 评价title view_title", "url): res = self.s.get(url, headers=self.headers, proxies=self.proxies) if res.status_code != 200:", "评价title view_title = each_view.xpath('.//a[@data-hook=\"review-title\"]/span/text()')[0] # 评价日期 view_date = each_view.xpath('.//span[@data-hook=\"review-date\"]/text()')[0] view_format", "= [view_goods, view_name, view_star, view_title, view_date, view_colour, view_size, view_body, view_useful]", "from lxml import etree import re, time, random, datetime from", "= each_view.xpath('.//span[@data-hook=\"helpful-vote-statement\"]/text()')[0] view_useful = view_useful_raw.split(' ')[0] if view_useful == 'one':", "# 评价内容 view_body = each_view.xpath('string(.//span[@data-hook=\"review-body\"]/span)') # 评价有用数量 try: view_useful_raw =", "10.0; Win64; x64) AppleWebKit/537.36 \\ (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36\"", "True: try: review_threads = [threading.Thread(target=self.get_review, args=(self.url_queue.get(),)) for m in range(30)", "print(\"请求链接出错,重试中...\") pass time.sleep(random.uniform(0.5,2.1)) if self.url_queue.empty(): break view_goods_pd = pd.DataFrame(self.view_list, columns=['review_goods',", "each, re.I): view_colour = each.split(':')[1].strip() if re.search(\"size|style|サイズ\", each, re.I): view_size", "url = base_url + each_asin + '?pageNumber=' + str(page) self.url_queue.put(url)", "if res.status_code != 200: print(\"请求出错,状态码为:%s\" % res.status_code) print(res.text) return res_html", "'review_date', 'review_colour', 
'review_size', 'review_body', 'review_useful']) view_goods_pd.drop_duplicates(subset=['review_name', 'review_date','review_body'], inplace=True) aft =", "each_asin + '?pageNumber=' + str(page) self.url_queue.put(url) print(\"review_page_%d\" % page, url)", "'?pageNumber=' + str(page) self.url_queue.put(url) print(\"review_page_%d\" % page, url) time.sleep(1.5) while", "requests.Session() self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies) def get_review(self, url): res = self.s.get(url,", "try: view_useful = int(view_useful) except: pass except: view_useful = 0", "'com': self.row_url = \"https://www.amazon.com\" self.s = requests.Session() self.s.get(url=self.row_url, headers=self.headers, proxies=self.proxies)", "in review_threads: each.start() print(\"队列剩余数量\", self.url_queue.qsize()) for each in review_threads: each.join()", "else: url = base_url + each_asin + '?pageNumber=' + str(page)", "= base_url + each_asin else: url = base_url + each_asin", "'one': view_useful = 1 try: view_useful = int(view_useful) except: pass", "view_format: if re.search(\"color|colour|色\", each, re.I): view_colour = each.split(':')[1].strip() if re.search(\"size|style|サイズ\",", "self.url_queue.put(url) print(\"review_page_%d\" % page, url) time.sleep(1.5) while True: try: review_threads", "\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 \\ (KHTML,", "1 else: end_page = int(each_count) // 10 + 2 for", "> 0: if int(each_count) % 10 == 0: end_page =", "range(1, end_page): if page == 1: url = base_url +" ]
[ "numpy as np from pathlib import Path def extract_param(data, n_states):", "data[f\"sp_{n_states}\"] return y, sp, param @pytest.fixture(scope=\"session\") def trace_simple(request): \"\"\"Trace data", "trace_simple(request): \"\"\"Trace data can be generated by running ./data/generate_trace_data.py \"\"\"", "2 param = extract_param(data, n_states) y = data[f\"y_{n_states}\"] sp =", "\"\"\" data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = 2 param", "@pytest.fixture(scope=\"session\") def trace_simple(request): \"\"\"Trace data can be generated by running", "for key in keys: param[key] = data[f\"{key}_{n_states}\"] return param @pytest.fixture(scope=\"session\",", "data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = 2 param =", "{\"n_states\": n_states} for key in keys: param[key] = data[f\"{key}_{n_states}\"] return", "def trace_lownoise(request): \"\"\"Trace data can be generated by running ./data/generate_trace_data.py", "np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = request.param param = extract_param(data, n_states)", "\"means\", \"st_devs\") param = {\"n_states\": n_states} for key in keys:", "(\"initial_state_prob\", \"transition_prob\", \"means\", \"st_devs\") param = {\"n_states\": n_states} for key", "n_states = request.param param = extract_param(data, n_states) y = data[f\"y_{n_states}\"]", "= 2 param = extract_param(data, n_states) y = data[f\"y_{n_states}\"] sp", "trace_lownoise(request): \"\"\"Trace data can be generated by running ./data/generate_trace_data.py \"\"\"", "def extract_param(data, n_states): keys = (\"initial_state_prob\", \"transition_prob\", \"means\", \"st_devs\") param", "keys = (\"initial_state_prob\", \"transition_prob\", \"means\", \"st_devs\") param = {\"n_states\": n_states}", "/ \"data/trace_data.npz\") n_states = request.param param = extract_param(data, n_states) y", "y = data[f\"y_{n_states}\"] sp = data[f\"sp_{n_states}\"] return y, sp, param", "= 
data[f\"y_{n_states}\"] sp = data[f\"sp_{n_states}\"] return y, sp, param @pytest.fixture(scope=\"session\")", "def trace_simple(request): \"\"\"Trace data can be generated by running ./data/generate_trace_data.py", "param @pytest.fixture(scope=\"session\", params=[2, 3, 4]) def trace_lownoise(request): \"\"\"Trace data can", "by running ./data/generate_trace_data.py \"\"\" data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states", "./data/generate_trace_data.py \"\"\" data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = request.param", "request.param param = extract_param(data, n_states) y = data[f\"y_{n_states}\"] sp =", "extract_param(data, n_states): keys = (\"initial_state_prob\", \"transition_prob\", \"means\", \"st_devs\") param =", "= (\"initial_state_prob\", \"transition_prob\", \"means\", \"st_devs\") param = {\"n_states\": n_states} for", "return y, sp, param @pytest.fixture(scope=\"session\") def trace_simple(request): \"\"\"Trace data can", "= np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = request.param param = extract_param(data,", "as np from pathlib import Path def extract_param(data, n_states): keys", "running ./data/generate_trace_data.py \"\"\" data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states =", "\"\"\" data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = request.param param", "data[f\"{key}_{n_states}\"] return param @pytest.fixture(scope=\"session\", params=[2, 3, 4]) def trace_lownoise(request): \"\"\"Trace", "import Path def extract_param(data, n_states): keys = (\"initial_state_prob\", \"transition_prob\", \"means\",", "param = {\"n_states\": n_states} for key in keys: param[key] =", "n_states): keys = (\"initial_state_prob\", \"transition_prob\", \"means\", \"st_devs\") param = {\"n_states\":", "data can be generated by running ./data/generate_trace_data.py \"\"\" data =", "params=[2, 3, 4]) def trace_lownoise(request): \"\"\"Trace data 
can be generated", "param @pytest.fixture(scope=\"session\") def trace_simple(request): \"\"\"Trace data can be generated by", "n_states) y = data[f\"y_{n_states}\"] sp = data[f\"sp_{n_states}\"] return y, sp,", "= data[f\"sp_{n_states}\"] return y, sp, param @pytest.fixture(scope=\"session\") def trace_simple(request): \"\"\"Trace", "\"\"\"Trace data can be generated by running ./data/generate_trace_data.py \"\"\" data", "from pathlib import Path def extract_param(data, n_states): keys = (\"initial_state_prob\",", "\"transition_prob\", \"means\", \"st_devs\") param = {\"n_states\": n_states} for key in", "= extract_param(data, n_states) y = data[f\"y_{n_states}\"] sp = data[f\"sp_{n_states}\"] return", "param = extract_param(data, n_states) y = data[f\"y_{n_states}\"] sp = data[f\"sp_{n_states}\"]", "pytest import numpy as np from pathlib import Path def", "= {\"n_states\": n_states} for key in keys: param[key] = data[f\"{key}_{n_states}\"]", "3, 4]) def trace_lownoise(request): \"\"\"Trace data can be generated by", "import pytest import numpy as np from pathlib import Path", "= request.param param = extract_param(data, n_states) y = data[f\"y_{n_states}\"] sp", "sp, param @pytest.fixture(scope=\"session\") def trace_simple(request): \"\"\"Trace data can be generated", "Path def extract_param(data, n_states): keys = (\"initial_state_prob\", \"transition_prob\", \"means\", \"st_devs\")", "./data/generate_trace_data.py \"\"\" data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = 2", "n_states} for key in keys: param[key] = data[f\"{key}_{n_states}\"] return param", "sp = data[f\"sp_{n_states}\"] return y, sp, param @pytest.fixture(scope=\"session\") def trace_simple(request):", "be generated by running ./data/generate_trace_data.py \"\"\" data = np.load(Path(__file__).parent /", "import numpy as np from pathlib import Path def extract_param(data,", "param[key] = data[f\"{key}_{n_states}\"] return param @pytest.fixture(scope=\"session\", 
params=[2, 3, 4]) def", "\"data/trace_data.npz\") n_states = 2 param = extract_param(data, n_states) y =", "can be generated by running ./data/generate_trace_data.py \"\"\" data = np.load(Path(__file__).parent", "data[f\"y_{n_states}\"] sp = data[f\"sp_{n_states}\"] return y, sp, param @pytest.fixture(scope=\"session\") def", "in keys: param[key] = data[f\"{key}_{n_states}\"] return param @pytest.fixture(scope=\"session\", params=[2, 3,", "keys: param[key] = data[f\"{key}_{n_states}\"] return param @pytest.fixture(scope=\"session\", params=[2, 3, 4])", "np from pathlib import Path def extract_param(data, n_states): keys =", "/ \"data/trace_data.npz\") n_states = 2 param = extract_param(data, n_states) y", "return param @pytest.fixture(scope=\"session\", params=[2, 3, 4]) def trace_lownoise(request): \"\"\"Trace data", "generated by running ./data/generate_trace_data.py \"\"\" data = np.load(Path(__file__).parent / \"data/trace_data.npz\")", "4]) def trace_lownoise(request): \"\"\"Trace data can be generated by running", "= np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = 2 param = extract_param(data,", "y, sp, param @pytest.fixture(scope=\"session\") def trace_simple(request): \"\"\"Trace data can be", "n_states = 2 param = extract_param(data, n_states) y = data[f\"y_{n_states}\"]", "pathlib import Path def extract_param(data, n_states): keys = (\"initial_state_prob\", \"transition_prob\",", "key in keys: param[key] = data[f\"{key}_{n_states}\"] return param @pytest.fixture(scope=\"session\", params=[2,", "\"data/trace_data.npz\") n_states = request.param param = extract_param(data, n_states) y =", "extract_param(data, n_states) y = data[f\"y_{n_states}\"] sp = data[f\"sp_{n_states}\"] return y,", "= data[f\"{key}_{n_states}\"] return param @pytest.fixture(scope=\"session\", params=[2, 3, 4]) def trace_lownoise(request):", "\"st_devs\") param = {\"n_states\": n_states} for key in keys: param[key]", "np.load(Path(__file__).parent / 
\"data/trace_data.npz\") n_states = 2 param = extract_param(data, n_states)", "@pytest.fixture(scope=\"session\", params=[2, 3, 4]) def trace_lownoise(request): \"\"\"Trace data can be", "data = np.load(Path(__file__).parent / \"data/trace_data.npz\") n_states = request.param param =" ]
[ "time.sleep(random.random()) return arg def pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer) def printer(future): pools[2].submit(worker,", "pools[1].submit(worker, future.result()).add_done_callback(printer) def printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout) def spout(future): print(future.result()) def", "def instanceProcessPool(): pools = [] for i in range(3): pool", "without Pipe class. \"\"\" __author__ = \"<NAME>\" __email__ = \"<EMAIL>\"", "pools = instanceProcessPool() # pool = ProcessPoolExecutor([max_workers]) runThreadsInPipeline(pools) # pools[0].submit(worker,", "random.random()).add_done_callback(pipeline) if __name__ == \"__main__\": __spec__ = None # Fix", "import time import random def worker(arg): time.sleep(random.random()) return arg def", "def runThreadsInPipeline(pools): for pool in pools: pool.submit(worker, random.random()).add_done_callback(pipeline) if __name__", "__copyright__ = \"Copyright (c) 2018 <NAME>\" __license__ = \"MIT\" from", "<NAME>\" __license__ = \"MIT\" from concurrent.futures import ProcessPoolExecutor import time", "= ProcessPoolExecutor(2) pools.append(pool) return pools def shutdownPools(pools): for pool in", "in Spyder's IPython pools = instanceProcessPool() # pool = ProcessPoolExecutor([max_workers])", "#!/usr/bin/env python \"\"\" Artesanal example Pipe without Pipe class. \"\"\"", "for i in range(3): pool = ProcessPoolExecutor(2) pools.append(pool) return pools", "pools: pool.submit(worker, random.random()).add_done_callback(pipeline) if __name__ == \"__main__\": __spec__ = None", "def worker(arg): time.sleep(random.random()) return arg def pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer) def", "# Fix multiprocessing in Spyder's IPython pools = instanceProcessPool() #", "\"<EMAIL>\" __copyright__ = \"Copyright (c) 2018 <NAME>\" __license__ = \"MIT\"", "Artesanal example Pipe without Pipe class. 
\"\"\" __author__ = \"<NAME>\"", "shutdownPools(pools): for pool in pools: pool.shutdown() def runThreadsInPipeline(pools): for pool", "\"__main__\": __spec__ = None # Fix multiprocessing in Spyder's IPython", "future.result()).add_done_callback(printer) def printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout) def spout(future): print(future.result()) def instanceProcessPool():", "def shutdownPools(pools): for pool in pools: pool.shutdown() def runThreadsInPipeline(pools): for", "2018 <NAME>\" __license__ = \"MIT\" from concurrent.futures import ProcessPoolExecutor import", "pools[2].submit(worker, future.result()).add_done_callback(spout) def spout(future): print(future.result()) def instanceProcessPool(): pools = []", "(c) 2018 <NAME>\" __license__ = \"MIT\" from concurrent.futures import ProcessPoolExecutor", "None # Fix multiprocessing in Spyder's IPython pools = instanceProcessPool()", "future.result()).add_done_callback(spout) def spout(future): print(future.result()) def instanceProcessPool(): pools = [] for", "\"<NAME>\" __email__ = \"<EMAIL>\" __copyright__ = \"Copyright (c) 2018 <NAME>\"", "import ProcessPoolExecutor import time import random def worker(arg): time.sleep(random.random()) return", "instanceProcessPool() # pool = ProcessPoolExecutor([max_workers]) runThreadsInPipeline(pools) # pools[0].submit(worker, random.random()).add_done_callback(pipeline) shutdownPools(pools)", "__author__ = \"<NAME>\" __email__ = \"<EMAIL>\" __copyright__ = \"Copyright (c)", "worker(arg): time.sleep(random.random()) return arg def pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer) def printer(future):", "pool in pools: pool.submit(worker, random.random()).add_done_callback(pipeline) if __name__ == \"__main__\": __spec__", "__spec__ = None # Fix multiprocessing in Spyder's IPython pools", "concurrent.futures import ProcessPoolExecutor import time import random def worker(arg): 
time.sleep(random.random())", "__license__ = \"MIT\" from concurrent.futures import ProcessPoolExecutor import time import", "<reponame>rafagarciac/ParallelProgrammingPython #!/usr/bin/env python \"\"\" Artesanal example Pipe without Pipe class.", "def pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer) def printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout) def spout(future):", "[] for i in range(3): pool = ProcessPoolExecutor(2) pools.append(pool) return", "def printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout) def spout(future): print(future.result()) def instanceProcessPool(): pools", "return pools def shutdownPools(pools): for pool in pools: pool.shutdown() def", "Pipe class. \"\"\" __author__ = \"<NAME>\" __email__ = \"<EMAIL>\" __copyright__", "return arg def pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer) def printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout)", "for pool in pools: pool.shutdown() def runThreadsInPipeline(pools): for pool in", "= \"MIT\" from concurrent.futures import ProcessPoolExecutor import time import random", "printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout) def spout(future): print(future.result()) def instanceProcessPool(): pools =", "import random def worker(arg): time.sleep(random.random()) return arg def pipeline(future): pools[1].submit(worker,", "pools = [] for i in range(3): pool = ProcessPoolExecutor(2)", "python \"\"\" Artesanal example Pipe without Pipe class. 
\"\"\" __author__", "in pools: pool.submit(worker, random.random()).add_done_callback(pipeline) if __name__ == \"__main__\": __spec__ =", "\"Copyright (c) 2018 <NAME>\" __license__ = \"MIT\" from concurrent.futures import", "in range(3): pool = ProcessPoolExecutor(2) pools.append(pool) return pools def shutdownPools(pools):", "pool = ProcessPoolExecutor(2) pools.append(pool) return pools def shutdownPools(pools): for pool", "Spyder's IPython pools = instanceProcessPool() # pool = ProcessPoolExecutor([max_workers]) runThreadsInPipeline(pools)", "\"\"\" __author__ = \"<NAME>\" __email__ = \"<EMAIL>\" __copyright__ = \"Copyright", "runThreadsInPipeline(pools): for pool in pools: pool.submit(worker, random.random()).add_done_callback(pipeline) if __name__ ==", "# pool = ProcessPoolExecutor([max_workers]) runThreadsInPipeline(pools) # pools[0].submit(worker, random.random()).add_done_callback(pipeline) shutdownPools(pools) #", "Fix multiprocessing in Spyder's IPython pools = instanceProcessPool() # pool", "= None # Fix multiprocessing in Spyder's IPython pools =", "= \"Copyright (c) 2018 <NAME>\" __license__ = \"MIT\" from concurrent.futures", "def spout(future): print(future.result()) def instanceProcessPool(): pools = [] for i", "i in range(3): pool = ProcessPoolExecutor(2) pools.append(pool) return pools def", "pools def shutdownPools(pools): for pool in pools: pool.shutdown() def runThreadsInPipeline(pools):", "\"\"\" Artesanal example Pipe without Pipe class. \"\"\" __author__ =", "pool in pools: pool.shutdown() def runThreadsInPipeline(pools): for pool in pools:", "ProcessPoolExecutor import time import random def worker(arg): time.sleep(random.random()) return arg", "class. 
\"\"\" __author__ = \"<NAME>\" __email__ = \"<EMAIL>\" __copyright__ =", "pool.shutdown() def runThreadsInPipeline(pools): for pool in pools: pool.submit(worker, random.random()).add_done_callback(pipeline) if", "= instanceProcessPool() # pool = ProcessPoolExecutor([max_workers]) runThreadsInPipeline(pools) # pools[0].submit(worker, random.random()).add_done_callback(pipeline)", "example Pipe without Pipe class. \"\"\" __author__ = \"<NAME>\" __email__", "pools.append(pool) return pools def shutdownPools(pools): for pool in pools: pool.shutdown()", "= \"<NAME>\" __email__ = \"<EMAIL>\" __copyright__ = \"Copyright (c) 2018", "if __name__ == \"__main__\": __spec__ = None # Fix multiprocessing", "ProcessPoolExecutor(2) pools.append(pool) return pools def shutdownPools(pools): for pool in pools:", "== \"__main__\": __spec__ = None # Fix multiprocessing in Spyder's", "from concurrent.futures import ProcessPoolExecutor import time import random def worker(arg):", "pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer) def printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout) def spout(future): print(future.result())", "= \"<EMAIL>\" __copyright__ = \"Copyright (c) 2018 <NAME>\" __license__ =", "IPython pools = instanceProcessPool() # pool = ProcessPoolExecutor([max_workers]) runThreadsInPipeline(pools) #", "pool.submit(worker, random.random()).add_done_callback(pipeline) if __name__ == \"__main__\": __spec__ = None #", "multiprocessing in Spyder's IPython pools = instanceProcessPool() # pool =", "time import random def worker(arg): time.sleep(random.random()) return arg def pipeline(future):", "pool = ProcessPoolExecutor([max_workers]) runThreadsInPipeline(pools) # pools[0].submit(worker, random.random()).add_done_callback(pipeline) shutdownPools(pools) # pool.shutdown()", "print(future.result()) def instanceProcessPool(): pools = [] for i in range(3):", "range(3): pool = ProcessPoolExecutor(2) 
pools.append(pool) return pools def shutdownPools(pools): for", "in pools: pool.shutdown() def runThreadsInPipeline(pools): for pool in pools: pool.submit(worker,", "instanceProcessPool(): pools = [] for i in range(3): pool =", "Pipe without Pipe class. \"\"\" __author__ = \"<NAME>\" __email__ =", "pools: pool.shutdown() def runThreadsInPipeline(pools): for pool in pools: pool.submit(worker, random.random()).add_done_callback(pipeline)", "spout(future): print(future.result()) def instanceProcessPool(): pools = [] for i in", "__email__ = \"<EMAIL>\" __copyright__ = \"Copyright (c) 2018 <NAME>\" __license__", "__name__ == \"__main__\": __spec__ = None # Fix multiprocessing in", "= [] for i in range(3): pool = ProcessPoolExecutor(2) pools.append(pool)", "\"MIT\" from concurrent.futures import ProcessPoolExecutor import time import random def", "random def worker(arg): time.sleep(random.random()) return arg def pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer)", "for pool in pools: pool.submit(worker, random.random()).add_done_callback(pipeline) if __name__ == \"__main__\":", "arg def pipeline(future): pools[1].submit(worker, future.result()).add_done_callback(printer) def printer(future): pools[2].submit(worker, future.result()).add_done_callback(spout) def" ]
[ "-m digibujogens ... \"\"\" def main(): \"\"\" Execute the application.", "\"\"\" raise NotImplementedError # Make the script executable. if __name__", "# Make the script executable. if __name__ == \"__main__\": raise", "\"\"\" Main application entry point. python -m digibujogens ... \"\"\"", "Make the script executable. if __name__ == \"__main__\": raise SystemExit(main())", "python -m digibujogens ... \"\"\" def main(): \"\"\" Execute the", "NotImplementedError # Make the script executable. if __name__ == \"__main__\":", "application. \"\"\" raise NotImplementedError # Make the script executable. if", "\"\"\" Execute the application. \"\"\" raise NotImplementedError # Make the", "Main application entry point. python -m digibujogens ... \"\"\" def", "\"\"\" def main(): \"\"\" Execute the application. \"\"\" raise NotImplementedError", "digibujogens ... \"\"\" def main(): \"\"\" Execute the application. \"\"\"", "... \"\"\" def main(): \"\"\" Execute the application. \"\"\" raise", "raise NotImplementedError # Make the script executable. if __name__ ==", "point. python -m digibujogens ... \"\"\" def main(): \"\"\" Execute", "def main(): \"\"\" Execute the application. \"\"\" raise NotImplementedError #", "Execute the application. \"\"\" raise NotImplementedError # Make the script", "the application. \"\"\" raise NotImplementedError # Make the script executable.", "main(): \"\"\" Execute the application. \"\"\" raise NotImplementedError # Make", "application entry point. python -m digibujogens ... \"\"\" def main():", "entry point. python -m digibujogens ... \"\"\" def main(): \"\"\"" ]
[ "user code. :type plat_info: lisa.platforms.platinfo.PlatformInfo You need to provide the", "abritrary code execution. \"\"\".format( script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\", '-c', help=\"Path to", "avoids messing up with ``devlib`` internal members. \"\"\" ADB_PORT_DEFAULT =", "else: out_is_excep = False out = pickle.dumps(out) out_tempfile = {out_tempfiles}[1]", "super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs ) # vim :set tabstop=4 shiftwidth=4", "for the target to finish booting', ( KeyDesc('enable', 'Enable the", "If True, the current datetime will be appended to the", "inspect.getclosurevars(f) name, code_str = self._get_code(f) def mktemp(): return self.execute( f'mktemp", "help=\"Path to a TargetConf and PlatformInfo yaml file. Other options", "Target \"\"\" logger = self.get_logger() conn_settings = {} resolved_username =", "the created folder will not be tracked by any external", "foo.yml:: target-conf: name: myboard * file bar.yml:: target-conf: !include foo.yml", "target = Target(...) @target.remote_func(timeout=42) def foo(x, y): return x +", "'init', # We want to freeze everything except PID 1,", "not in non_pickled } if modules: modules = f\"import {',", "or '/data/local/tmp/devlib-target' if device: pass elif host: port = port", "cpuidle devlib module is not loaded') cm = nullcontext else:", "to freeze when using :meth:`freeze_userspace`. \"\"\" CONF_CLASS = TargetConf INIT_KWARGS_KEY_MAP", "and bail out if it fails to load. \"\"\" if", "# # Copyright (C) 2018, ARM Limited and contributors. #", "will be appended to the given ``name``. 
If ``name`` is", "x + y # Execute the function on the target", "logger.warning('Will not load cgroups devlib module: target is using systemd,", "'')}\") return target def get_res_dir(self, name=None, append_time=True, symlink=True): \"\"\" Returns", "kwargs['res_dir'] = res_dir kwargs['plat_info'] = plat_info # Create a devlib", "pretty_format(self, v): return '<password>' # Make sure all submodules of", "bool :param symlink: Create a symlink named ``results_latest`` to the", "] simulator_args.extend(virtio_args) # Quote/escape arguments and build the command line", "exist ... else: raise def __dir__(self): \"\"\" List our attributes", "'username': 'foo', 'password': '<PASSWORD>', }}) Or alternatively, from a YAML", "to provide the information needed to connect to the target.", "Execute the function on the target transparently val = foo(1,", "os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool) return binary tools = set(tools) -", "if err is not None else ValueError('No exception was raised", "Create a :class:`Target` from the YAML configuration file pointed by", "Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir) # Actually build the devlib Platform object", "logger.warning('Could not freeze userspace: freezer cgroup controller not available on", "use', [str]), KeyDesc('args', 'Keyword arguments to build the Platform object',", "is None: plat_info = PlatformInfo() else: # Make a copy", "Tests should not rely on that as the created folder", "be tracked by any external entity, which means the results", "named \"foo-bar-juno-on-my-desk\") if name: self.plat_info.add_src('target-conf', dict(name=name)) # Determine file transfer", "build the Platform object', [Mapping]), )), KeyDesc('excluded-modules', 'List of devlib", "disable idle states, cpuidle devlib module is not loaded') cm", "check. :type module: str .. 
note:: This will attempt to", "results directory :type name: str :param append_time: If True, the", "in some automated environment. :param name: Name of the results", "\"\"\" logger = self.get_logger() conn_settings = {} resolved_username = username", "{system['kernel']}\", f\"--dtb {system['dtb']}\", f\"--disk-image {system['disk']}\" )) diod_path = which('diod') if", "port=port or self.SSH_PORT_DEFAULT, host=host, strict_host_check=True if strict_host_check is None else", "\"\"\" _, target = cls.from_custom_cli(argv=argv, params=params) return target @classmethod def", "is necessary') cm = nullcontext else: controllers = [s.name for", ":class:`devlib.platform.Platform` to use to build the :class:`devlib.target.Target` :type devlib_platform: devlib.platform.Platform", "the Python interpreter on the target .. note:: Closure variables", "bool \"\"\" if isinstance(self._res_dir, ArtifactPath): root = self._res_dir.root relative =", "task but on Google Pixel it apparently # cannot be", "with all the necessary connection information: $ {script} --conf my_target.yml", "instance out of the configuration file devlib_platform_conf = conf['devlib']['platform'] devlib_platform_cls", "\"host\"', [str, None]), KeyDesc('keyfile', 'SSH private key file', [str, None]),", "original one to benefit from mapping configuration if issubclass(devlib_platform_cls, Gem5SimulationPlatform):", "devlib.platform.Platform :param plat_info: Platform information attached to this target, for", "help=\"The kind of target to connect to.\") device_group = parser.add_mutually_exclusive_group()", "kwargs['plat_info'] = plat_info # Create a devlib Platform instance out", "help=\"The hostname/IP of the target.\") parser.add_argument(\"--username\", \"-u\", help=\"Login username. 
Only", "name, workdir, device, host, port, username, password, keyfile, strict_host_check, use_scp,", "mapping configuration if issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir) # Actually build", "platform-specific things we are not interested in and getattr(cls, 'stage')", "from a YAML configuration file: Content of target_conf.yml: .. literalinclude::", "= os.path.join(ASSETS_PATH, 'binaries', self.abi, tool) if not os.path.isfile(binary): binary =", "None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None]),", "= Target(...) @target.remote_func(timeout=42) def foo(x, y): return x + y", "[str, None]), KeyDesc('keyfile', 'SSH private key file', [str, None]), KeyDesc('strict-host-check',", "[str]), KeyDesc('tools', 'List of tools to install on the target',", "rta_calib_res_dir, deferred=lazy_platinfo, fallback=True) logger.info(f'Effective platform information:\\n{self.plat_info}') @property @memoized def _uses_systemd(self):", "closure_vars.items(): if _f is not f and can_include(_f): add_func(_f, _name)", "in a YAML file is allowed and will work: *", "setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized from lisa.assets import ASSETS_PATH from", "Expensive computations are deferred so they will only be #", "*args and **kwargs so that it's usable by exekall def", ".. note:: That will not forward special methods like __str__,", "IP address of the host', [str, None]), KeyDesc('username', 'SSH username.", "devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {})) # Hack for", "problems that could appear after another module # is updated", "function on the target transparently val = foo(1, y=2) :Variable", "{dict of ArgumentParser.add_argument() options}}``. 
:type params: dict(str, dict) :return: A", "everything except PID 1, we don't want to let #", "accesses to the underlying :class:`devlib.target.Target`. .. note:: That will not", "argv: The list of arguments. ``sys.argv[1:]`` will be used if", "argparse.Namespace(**custom_args) return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir) def _init_target(self, kind, name,", "explicitly elif attr in _DEVLIB_AVAILABLE_MODULES: # pylint: disable=raise-missing-from raise AttributeError(f'Devlib", "textwrap import functools import inspect import pickle import tempfile from", "host', [str, None]), KeyDesc('username', 'SSH username. On ADB connections, \"root\"", "be found: {e}') plat_info = None return cls.from_conf(conf=conf, plat_info=plat_info) @classmethod", "arguments. ``sys.argv[1:]`` will be used if this is ``None``. :type", "License for the specific language governing permissions and # limitations", "{script} --conf my_target.yml Alternatively, --kind must be set along the", "append_time=append_time, symlink=symlink, ) def _get_res_dir(self, root, relative, name, append_time, symlink):", "modules try to connect to UART and do very #", "time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not name: name = time_str elif", "Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath,", "all submodules of devlib.module are imported so the classes #", "we can use it as a sudo # password. conn_settings.update(", "+= '\\n' + '\\n'.join(map(itemgetter(1), funcs.values())) non_pickled = set(modules.keys()) | set(funcs.keys())", "else: device = 'DEFAULT' conn_settings['device'] = device # If the", "logs.\") parser.add_argument(\"--res-dir\", \"-o\", help=\"Result directory of the created Target. 
If", "__getattr__(self, attr): \"\"\" Forward all non-overriden attributes/method accesses to the", "raise if symlink: res_lnk = os.path.join(LISA_HOME, LATEST_LINK) with contextlib.suppress(FileNotFoundError): os.remove(res_lnk)", "x: yield x def get_tags(self): return {'board': self.name} @classmethod def", "custom_args = argparse.Namespace(**custom_args) return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir) def _init_target(self,", "module not in _DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\" is not a devlib", "link with contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk) return res_dir def install_tools(self, tools):", "line for line in lines if not line.strip().startswith('@') ] code_str", "to will be: * bundled in the script if it", "connection settings. Only keys defined below are allowed, with the", "global name # anyway, and it's restricted to the wrapper", "' ' * 4), modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=', '.join( f'{name}={name}'", "param, value in vars(args).items() if param in custom_params } custom_args", "shlex from collections.abc import Mapping import copy import sys import", "the underlying target, and the devlib modules that could be", "messages if 'kind' not in target_conf: parser.error('--conf with target configuration", "conn_settings['password'] = password elif kind == 'host': devlib_target_cls = devlib.LocalLinuxTarget", "the module if it's not loaded already, and bail out", "username is None: try: target.adb_root(enable=True) except Exception as e: #", "LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', ( KeyDesc('class', 'Name of the class", "{kind} target...') # If the target is Android, we need", "Python interpreter on the target \"\"\" def wrapper_param(f): @functools.wraps(f) def", "module is available, ``False`` otherwise. 
:param module: Devlib module to", "as e: # pylint: disable=broad-except cls.get_logger().warning(f'No platform information could be", "Same as :meth:`from_custom_cli` without the custom parameters capabilities. :return: A", "dict) :return: A tuple ``(args, target)`` .. note:: This method", "cls.conf_to_init_kwargs(conf) kwargs['res_dir'] = res_dir kwargs['plat_info'] = plat_info # Create a", "# be parsed and produced by any other third-party code", "= argparse.Namespace(**custom_args) return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir) def _init_target(self, kind,", "binary = os.path.join(ASSETS_PATH, 'binaries', self.abi, tool) if not os.path.isfile(binary): binary", "We only inline local things that are in # direct", "situations prevent from freezing anything. When that happens, a warning", "or any exception is pickled back and is returned/raised in", "directory :type name: str :param append_time: If True, the current", "on the target', [TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform information", "for quick scripting. 
\"\"\" parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\"", "is empty, we probably got an exception except EOFError: #", "to target {(name or '')}\") return target def get_res_dir(self, name=None,", "in kwargs.keys() ) ) return script @staticmethod def _get_code(f): lines,", "port=port, username=username, password=password, keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, )", "ARM-software/devlib#225 workdir = workdir or '/data/local/tmp/devlib-target' if device: pass elif", "Executes the given Python function ``f`` with the provided positional", "= which('diod') if diod_path is None: raise RuntimeError('Failed to find", "key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of", "to install on the target', [TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the", "diod_path = which('diod') if diod_path is None: raise RuntimeError('Failed to", "output dir be created automatically virtio_args = [ f'--which-diod={diod_path}', '--workload-automation-vio={}',", "'usb', 'transport', # We don't actually need this task but", "cm(): logger.info(f\"Freezing all tasks except: {','.join(exclude)}\") try: yield self.cgroups.freeze(exclude) finally:", "the custom parameters capabilities. :return: A connected :class:`Target` \"\"\" _,", "# We don't actually need this task but on Google", "'File transfer method. Can be \"sftp\" (default) or \"scp\". (Only", "code LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', ( KeyDesc('class', 'Name of the", "can't afford to freeze when using :meth:`freeze_userspace`. 
\"\"\" CONF_CLASS =", "try: os.makedirs(res_dir) break except FileExistsError: # If the time is", "ValueError('No exception was raised or value returned by the function')", "_get_res_dir(self, root, relative, name, append_time, symlink): logger = self.get_logger() while", "if the folder already exists. In that case, # append_time", "from types import ModuleType, FunctionType from operator import itemgetter import", "other fields are optional if the relevant features aren't needed.", "else {out_tempfiles}[0] with open(out_tempfile, 'wb') as f: f.write(out) ''').format( f=name,", "can lead to abritrary code execution. \"\"\" conf = TargetConf.from_yaml_map(path)", "{ 'platform': { 'class': 'devlib.platform.Platform' } } } class Target(Loggable,", "# Create a devlib Platform instance out of the configuration", "boot check', [bool]), KeyDesc('timeout', 'Timeout of the boot check', [int]),", "if not os.path.isfile(binary): binary = os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool) return", "cm() as x: yield x def get_tags(self): return {'board': self.name}", "kwargs, **execute_kwargs): \"\"\" Executes the given Python function ``f`` with", "already exists. In that case, # append_time should be used", "the same module so that: # 1. there is no", "hard reboot will be required if isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager", "self.get_logger() conn_settings = {} resolved_username = username or 'root' logger.debug(f'Setting", "elif attr in _DEVLIB_AVAILABLE_MODULES: # pylint: disable=raise-missing-from raise AttributeError(f'Devlib target", "copy.copy(devlib_platform_conf.get('args', {})) # Hack for Gem5 devlib Platform, that requires", "value returned by the function') else: raise excep out_tempfiles =", "contributors. # # Licensed under the Apache License, Version 2.0", "plat_info # Take the board name from the target configuration", "are loaded on demand when accessed. 
\"\"\" def get(): return", "\"\"\" Dictionary mapping OS name to list of task names", "self._res_dir = res_dir os.makedirs(self._res_dir, exist_ok=True) if os.listdir(self._res_dir): raise ValueError(f'res_dir must", "note:: This will attempt to load the module if it's", "control if hasattr(f, '__wrapped__'): raise TypeError('Decorated functions cannot be called", "devlib.LocalLinuxTarget # If we are given a password, assume we", "that could appear after another module # is updated or", "open(name, 'rb') as f: return pickle.loads(f.read()) def parse_output(paths, err): val,", "a YAML target configuration file with all the necessary connection", ":meth:`execute` that will spawn the Python interpreter on the target", "all domains') try: cpuidle = self.cpuidle except AttributeError: logger.warning('Could not", "information:\\n{plat_info}') self.plat_info = plat_info # Take the board name from", "* bundled in the script if it is defined in", "the form of ``{param_name: {dict of ArgumentParser.add_argument() options}}``. :type params:", "idle states, cpuidle devlib module is not loaded') cm =", "risk # 2. we don't inline the whole world, which", ":returns: ``True`` if module is available, ``False`` otherwise. :param module:", "YAML :: TargetConf.from_yaml_map('target_conf.yml') The following special YAML tags can be", "= None, plat_info: PlatformInfo = None) -> 'Target': cls.get_logger().info(f'Target configuration:\\n{conf}')", "created folder will not be tracked by any external entity,", "we are sure to be correct for tool in map(bin_path,", "deferred=lazy_platinfo, fallback=True) logger.info(f'Effective platform information:\\n{self.plat_info}') @property @memoized def _uses_systemd(self): try:", "ARM Limited and contributors. # # Licensed under the Apache", "@target.remote_func(timeout=42) def foo(x, y): return x + y # Execute", "the target \"\"\" def wrapper_param(f): @functools.wraps(f) def wrapper(*f_args, **f_kwargs): return", "in the same module so that: # 1. 
there is", "= tuple() try: out_tempfiles = (mktemp(), mktemp()) snippet = self._make_remote_snippet(", "OF ANY KIND, either express or implied. # See the", "See the License for the specific language governing permissions and", "= PlatformInfo() else: # Make a copy of the PlatformInfo", "directory of the created Target. If no directory is specified,", "== 'host': devlib_target_cls = devlib.LocalLinuxTarget # If we are given", "raise RuntimeError('Failed to find \"diod\" on your host machine, check", "except AttributeError: logger.warning('Could not disable idle states, cpuidle devlib module", "for path in out_tempfiles: self.remove(path) def remote_func(self, **kwargs): \"\"\" Decorates", "parser.error('--conf with target configuration or any of the connection options", "You need to provide the information needed to connect to", "were passed when adding the target source to it plat_info", "if symlink: res_lnk = os.path.join(LISA_HOME, LATEST_LINK) with contextlib.suppress(FileNotFoundError): os.remove(res_lnk) #", "'name': 'myboard', 'host': 192.0.2.1, 'kind': 'linux', 'username': 'foo', 'password': '<PASSWORD>',", "third-party code LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', ( KeyDesc('class', 'Name of", "to in writing, software # distributed under the License is", "freeze it. 'thermal-engine', # Similar issue with HiKey960, the board", "We don't actually need this task but on Google Pixel", "datetime. :type append_time: bool :param symlink: Create a symlink named", "== 'android': devlib_target_cls = devlib.AndroidTarget # Workaround for ARM-software/devlib#225 workdir", "be loaded on-demand. 
\"\"\" attrs = set(super().__dir__()) | set(dir(self.target)) |", "None and k not in not_target_conf_opt }) # Some sanity", "method should not be relied upon to implement long-term scripts,", "name in kwargs.keys() ) ) return script @staticmethod def _get_code(f):", "value only used to embelish logs', [str]), KeyDesc('kind', 'Target kind.", "e: out = e out_is_excep = True else: out_is_excep =", "raise err if err is not None else ValueError('No exception", "is being used, according to: # https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d /run/systemd/system/',", "on demand if attr in self._devlib_loadable_modules: self.get_logger().info(f'Loading target devlib module", "Also, functions that are referred to will be: * bundled", "excluded, not loading it') # Something else that does not", "# (i.e. a Juno board might be named \"foo-bar-juno-on-my-desk\") if", "} custom_args = argparse.Namespace(**custom_args) return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir) def", "systemd, which already uses cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES -", "in the configuration file: .. code-block:: YAML target-conf: # \"!env:<type>", "on demand when accessed. 
\"\"\" def get(): return getattr(self.target, attr)", "or agreed to in writing, software # distributed under the", "''').format( f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n' + ' ' * 4), modules=modules,", "param in custom_params } custom_args = argparse.Namespace(**custom_args) return custom_args, cls.from_conf(conf=target_conf,", "custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir) def _init_target(self, kind, name, workdir, device,", "exekall def from_cli(cls, argv=None, params=None) -> 'Target': \"\"\" Same as", "import pickle import sys def wrapper(): {modules} {code} return {f}({kwargs})", "custom_params } custom_args = argparse.Namespace(**custom_args) return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir)", "{generated_help} An instance can be created by calling :class:`~TargetConf` with", "'.join( f' {key}: {val}' for key, val in conn_settings.items() if", "were used to # feed the function to us lines", "will be raised. .. attention:: Decorators are ignored and not", "option of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target workdir', [str]),", "referred to by name, assuming it comes from a module", "True def __getattr__(self, attr): \"\"\" Forward all non-overriden attributes/method accesses", "for k in params.keys()} # Options that are not a", "the username was explicitly set to \"root\", root the target", "UART and do very # platform-specific things we are not", "5555 SSH_PORT_DEFAULT = 22 CRITICAL_TASKS = { 'linux': [ 'init',", "cls(**kwargs) @classmethod def from_default_conf(cls): \"\"\" Create a :class:`Target` from the", "parser.add_argument(\"--res-dir\", \"-o\", help=\"Result directory of the created Target. 
If no", "append_time: name = f\"{name}-{time_str}\" # If we were given an", "password, keyfile, strict_host_check, use_scp, devlib_platform, wait_boot, wait_boot_timeout, ): \"\"\" Initialize", "we need just (eventually) the device if kind == 'android':", "platform information could be found: {e}') plat_info = None return", "False else: return True def __getattr__(self, attr): \"\"\" Forward all", "raise AttributeError(f'Devlib target module {attr} was explicitly excluded, not loading", "pylint: disable=raise-missing-from raise AttributeError(f'Devlib target module {attr} was explicitly excluded,", "= parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf = TargetConf() platform_info = None if", "Apache License, Version 2.0 (the \"License\"); you may # not", "cmd)) try: self.execute(cmd, **execute_kwargs) except Exception as e: # pylint:", "= { name: val for name, val in global_vars.items() if", "'target connection settings', ( KeyDesc('name', 'Board name, free-form value only", "# pylint: disable=broad-except err = e else: err = None", "target and that this module is in scope. If that", "loading it') # Something else that does not exist ...", "was explicitly set to \"root\", root the target as #", "compliance with the License. # You may obtain a copy", "def is_module_available(self, module): \"\"\" Check if the given devlib module", "create # the link with contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk) return res_dir", "return cls(**kwargs) @classmethod def from_default_conf(cls): \"\"\" Create a :class:`Target` from", "a test. 
EXAMPLES --conf can point to a YAML target", "def get(): return getattr(self.target, attr) try: return get() except AttributeError:", "method: {devlib_file_xfer}') use_scp = devlib_file_xfer == 'scp' self._installed_tools = set()", "# Autodetect information from the target, after the Target is", "except Exception as e: # pylint: disable=broad-except logger.warning(f'\"adb root\" failed:", "in not_target_conf_opt }) # Some sanity check to get better", "the target.\") parser.add_argument(\"--username\", \"-u\", help=\"Login username. Only applies to Linux", "target_conf = TargetConf() platform_info = None if args.conf: # Tentatively", "custom_params = {k.replace('-', '_') for k in params.keys()} # Options", "yield self.cgroups.freeze(exclude) finally: logger.info('Un-freezing userspace tasks') self.cgroups.freeze(thaw=True) with cm() as", "positional and keyword arguments. The return value or any exception", "method. Currently avaliable options # are 'sftp' and 'scp', defaults", "Setup virtio # Brackets are there to let the output", "automatically virtio_args = [ f'--which-diod={diod_path}', '--workload-automation-vio={}', ] simulator_args.extend(virtio_args) # Quote/escape", "License, Version 2.0 (the \"License\"); you may # not use", "ensure we get a unique name. try: os.makedirs(res_dir) break except", "in self.cgroups.list_subsystems()] if 'freezer' not in controllers: logger.warning('Could not freeze", "Hack for Gem5 devlib Platform, that requires a \"host_output_dir\" #", "class TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\" Target connection settings. Only keys defined", "yield x @contextlib.contextmanager def disable_idle_states(self): \"\"\" Context manager that lets", "will not be reflected in the caller's context. 
Also, functions", "not use this file except in compliance with the License.", "tempfile.TemporaryDirectory() as d: name = os.path.join(d, 'out') self.pull(path, name) with", "kwargs=kwargs, global_vars={ **closure_vars.globals, **closure_vars.nonlocals, }, out_tempfiles=out_tempfiles ) cmd = ['python3',", "try: yield self.cgroups.freeze(exclude) finally: logger.info('Un-freezing userspace tasks') self.cgroups.freeze(thaw=True) with cm()", "gets stuck in FREEZING if we # try to freeze", "'info', 'debug'), help=\"Verbosity level of the logs.\") parser.add_argument(\"--res-dir\", \"-o\", help=\"Result", "= (mktemp(), mktemp()) snippet = self._make_remote_snippet( name=name, code_str=code_str, module=f.__module__, kwargs=kwargs,", "actually need this task but on Google Pixel it apparently", "import itemgetter import devlib from devlib.exception import TargetStableError from devlib.utils.misc", "| set(funcs.keys()) global_vars = { name: val for name, val", "list, it # has been excluded explicitly elif attr in", "trusted YAML files as it can lead to abritrary code", "symlink=True ) self._res_dir = res_dir os.makedirs(self._res_dir, exist_ok=True) if os.listdir(self._res_dir): raise", "Devlib modules are loaded on demand when accessed. \"\"\" def", "'host': 192.0.2.1, 'kind': 'linux', 'username': 'foo', 'password': '<PASSWORD>', }}) Or", "conf) target_conf.add_src('command-line', { k: v for k, v in vars(args).items()", "that could be loaded on-demand. \"\"\" attrs = set(super().__dir__()) |", "Do not freeze the process in charge of de-freezing, otherwise", "HideExekallID): \"\"\" Target connection settings. Only keys defined below are", "that case, # append_time should be used to ensure we", "YAML configuration file. 
This file will be used to provide", "to install: {tools}') self.install_tools(tools) # Autodetect information from the target,", "free-form value only used to embelish logs', [str]), KeyDesc('kind', 'Target", "= copy.copy(simulator.get('args', [])) system_platform = system['platform'] # Get gem5 binary", "systemd-timesyncd running. 'systemd[^-]', 'dbus', 'sh', 'ssh', 'rsyslogd', 'jbd2' ], 'android':", "we # preserve that root so it can be relocated", "Only applies to Android kind.\") device_group.add_argument(\"--host\", \"-n\", help=\"The hostname/IP of", "will spawn the Python interpreter on the target .. note::", "settings in params.items(): parser.add_argument(f'--{param}', **settings) custom_params = {k.replace('-', '_') for", "methods like __str__, since the interpreter bypasses __getattr__ when looking", "self.remove(path) def remote_func(self, **kwargs): \"\"\" Decorates a given function to", "# TODO: compute the checksum of the tool + install", "**settings) custom_params = {k.replace('-', '_') for k in params.keys()} #", "device_group = parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\", help=\"The ADB ID of the", "be named \"foo-bar-juno-on-my-desk\") if name: self.plat_info.add_src('target-conf', dict(name=name)) # Determine file", "-d /run/systemd/system/', check_exit_code=True) except TargetStableError: return False else: return True", "options is required') if args.kind == 'android': if ('host' not", "object ######################################################################## target = devlib_target_cls( platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings, working_directory=workdir, connect=False,", "PlatformInfo so we don't modify the original # one we", "@memoized def _uses_systemd(self): try: # Check if systemd is being", "disable=broad-except logger.warning(f'\"adb root\" failed: {e}') logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, 
workdir=target.working_directory)}')", "None return parse_output(out_tempfiles, err) finally: for path in out_tempfiles: self.remove(path)", "from lisa.platforms.platinfo import PlatformInfo class PasswordKeyDesc(KeyDesc): def pretty_format(self, v): return", "wants it res_dir = ArtifactPath(root, os.path.join(relative, name)) # Compute base", "it res_dir = ArtifactPath(root, os.path.join(relative, name)) # Compute base installation", "= os.path.join(LISA_HOME, LATEST_LINK) with contextlib.suppress(FileNotFoundError): os.remove(res_lnk) # There may be", "excep out_tempfiles = tuple() try: out_tempfiles = (mktemp(), mktemp()) snippet", "pylint: disable=broad-except cls.get_logger().warning(f'No platform information could be found: {e}') plat_info", "of tools to install :type tools: list(str) \"\"\" def bin_path(tool):", "newly created results directory :type symlink: bool \"\"\" if isinstance(self._res_dir,", "not_target_conf_opt = { 'platform_info', 'log_level', 'res_dir', 'conf', } not_target_conf_opt.update(custom_params) args", "that it can # access them. It's harmless as they", "empty: {self._res_dir}') if plat_info is None: plat_info = PlatformInfo() else:", "name = f\"{name}-{time_str}\" # If we were given an ArtifactPath", "or 'root' logger.debug(f'Setting up {kind} target...') # If the target", "for cpu in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with cm() as x: yield", "and no specific value should be expected for a given", "2. 
we don't inline the whole world, which could lead", "[ 'sh', 'adbd', 'usb', 'transport', # We don't actually need", "raise ValueError(f'\"{module}\" is not a devlib module') try: getattr(self, module)", "cpu in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with cm() as x: yield x", "which could lead to a # number of problems that", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "gem5 binary arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', [])) simulator_args.extend(( f\"--kernel {system['kernel']}\",", "of it. {configurable_params} :param devlib_platform: Instance of :class:`devlib.platform.Platform` to use", "Platform configuration ######################################################################## if not devlib_platform: devlib_platform = devlib.platform.Platform() ########################################################################", "import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable from lisa.generic import TypedList from", "installation path logger.info(f'Creating result directory: {res_dir}') # It will fail", "calling :class:`~TargetConf` with a dictionary. The top-level `target-conf` key is", "(KeyError, ValueError): pass else: target_conf.add_src(args.conf, conf) target_conf.add_src('command-line', { k: v", "a symlink named ``results_latest`` to the newly created results directory", "return wrapper return wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system, simulator,", "defined in the same module so that: # 1. there", "\"License\"); you may # not use this file except in", "name: Name of the results directory :type name: str :param", "mktemp(): return self.execute( f'mktemp -p {shlex.quote(self.working_directory)}' ).strip() def read_output(path): with", "appear after another module # is updated or so. 
We", "are given a password, assume we can use it as", "params.items(): parser.add_argument(f'--{param}', **settings) custom_params = {k.replace('-', '_') for k in", "(ssh) or \"android\" (adb)', [str]), KeyDesc('host', 'Hostname or IP address", "or IP address of the host', [str, None]), KeyDesc('username', 'SSH", "lisa.platforms.platinfo.PlatformInfo You need to provide the information needed to connect", "Target(Loggable, HideExekallID, ExekallTaggable, Configurable): \"\"\" Wrap :class:`devlib.target.Target` to provide additional", "for later inspection. That board name is mostly free form", "# access them. It's harmless as they would shadow any", "(eventually) the device if kind == 'android': devlib_target_cls = devlib.AndroidTarget", "not rely on that as the created folder will not", "append_time: bool :param symlink: Create a symlink named ``results_latest`` to", "applied. \"\"\" sig = inspect.signature(f) kwargs = sig.bind(*args, **kwargs).arguments closure_vars", "can lead to abritrary code execution. \"\"\" path = os.environ['LISA_CONF']", "be used to reference an # environment variable. name: !env:str", "getattr(cls, 'stage') != 'early' ) } class TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\"", "to the target. if kind == 'android' and username is", "rely on that as the created folder will not be", "sysmted-journald or systemd-timesyncd running. 
'systemd[^-]', 'dbus', 'sh', 'ssh', 'rsyslogd', 'jbd2'", "is in the form of ``{param_name: {dict of ArgumentParser.add_argument() options}}``.", "nullcontext else: @contextlib.contextmanager def cm(): try: for cpu in range(self.plat_info['cpus-count']):", "name) with open(name, 'rb') as f: return pickle.loads(f.read()) def parse_output(paths,", "= { name: val for var_dct in inspect.getclosurevars(f) if isinstance(var_dct,", "try: cpuidle = self.cpuidle except AttributeError: logger.warning('Could not disable idle", "device=None, host=None, port=None, username=None, password=<PASSWORD>, keyfile=None, strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None,", "The following special YAML tags can be used in the", "def from_default_conf(cls): \"\"\" Create a :class:`Target` from the YAML configuration", "the Python interpreter on the target \"\"\" def wrapper_param(f): @functools.wraps(f)", "It is in the form of ``{param_name: {dict of ArgumentParser.add_argument()", "code. :type plat_info: lisa.platforms.platinfo.PlatformInfo You need to provide the information", "inspect.signature(f) kwargs = sig.bind(*args, **kwargs).arguments closure_vars = inspect.getclosurevars(f) name, code_str", "ArtifactPath(root, os.path.join(relative, name)) # Compute base installation path logger.info(f'Creating result", "yield finally: logger.info('Re-enabling idle states for all domains') for cpu", "only used to embelish logs', [str]), KeyDesc('kind', 'Target kind. Can", ") } class TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\" Target connection settings. 
Only", "use_scp = devlib_file_xfer == 'scp' self._installed_tools = set() self.target =", "error messages if 'kind' not in target_conf: parser.error('--conf with target", "Decorates a given function to execute remotely using :meth:`execute_python`:: target", "PATH variable') # Setup virtio # Brackets are there to", "cmd = ['python3', '-c', snippet] cmd = ' '.join(map(shlex.quote, cmd))", "# pylint: disable=raise-missing-from raise AttributeError(f'Devlib target module {attr} was explicitly", "# Options that are not a key in TargetConf must", "target {(name or '')}\") return target def get_res_dir(self, name=None, append_time=True,", "directory: {res_dir}') # It will fail if the folder already", "add_func(f, name): # Disallow decorated functions since their definition depends", "conf file with contextlib.suppress(KeyError, ValueError): platform_info = PlatformInfo.from_yaml_map(args.conf) # Load", "e out_is_excep = True else: out_is_excep = False out =", "the initial one # from devlib except EOFError: raise err", "(f, cls._get_code(f)[1]) for _name, _f in closure_vars.items(): if _f is", "allowed and will work: * file foo.yml:: target-conf: name: myboard", "# Initialize binary tools to deploy if tools: logger.info(f'Tools to", "of the class to use', [str]), KeyDesc('args', 'Keyword arguments to", "so it's a best-effort approach. \"\"\" logger = self.get_logger() if", "for name in kwargs.keys() ) ) return script @staticmethod def", "{ name: mod for name, mod in global_vars.items() if isinstance(mod,", "relevant features aren't needed. .. note:: The wrapping of :class:`devlib.target.Target`", "target-conf: target-conf: name: myboard \"\"\" STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection", "simulator_args.extend(virtio_args) # Quote/escape arguments and build the command line gem5_args", "from the target configuration so it becomes # available for", "for _name, _f in closure_vars.items(): if _f is not f", "to ensure we get a unique name. 
try: os.makedirs(res_dir) break", "\"\"\" path = os.environ['LISA_CONF'] return cls.from_one_conf(path) @classmethod def from_one_conf(cls, path):", "to deploy if tools: logger.info(f'Tools to install: {tools}') self.install_tools(tools) #", "name from the target configuration so it becomes # available", "'android' and username is None: try: target.adb_root(enable=True) except Exception as", "textual name of the Platform allows this YAML configuration #", "limitations under the License. # from datetime import datetime import", ":class:`~TargetConf` with a dictionary. The top-level `target-conf` key is not", "allows this YAML configuration # to not use any python-specific", "working_directory=workdir, connect=False, ) target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) # None as username means", "devlib module: target is using systemd, which already uses cgroups')", "$ {script} --kind linux --host 192.0.2.1 --username root --password <PASSWORD>", "are allowed, with the given meaning and type: {generated_help} An", "import inspect import pickle import tempfile from types import ModuleType,", "# pylint: disable=dangerous-default-value super().__init__() logger = self.get_logger() self.name = name", "is_module_available(self, module): \"\"\" Check if the given devlib module is", "def parse_output(paths, err): val, excep = paths try: return read_output(val)", "datetime import datetime import os import os.path import contextlib import", "line gem5_args = ' '.join(shlex.quote(a) for a in simulator_args) super().__init__(", "with open(out_tempfile, 'wb') as f: f.write(out) ''').format( f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n'", "the created Target. If no directory is specified, a default", "License. # from datetime import datetime import os import os.path", "is None else strict_host_check, use_scp=False if use_scp is None else", "unique name. try: os.makedirs(res_dir) break except FileExistsError: # If the", "of the target. 
Superseeds --host. Only applies to Android kind.\")", "specified') if args.kind == 'linux': for required in ['host', 'username',", "if kind == 'android': devlib_target_cls = devlib.AndroidTarget # Workaround for", "to run a test. EXAMPLES --conf can point to a", "load', [TypedList[str]]), KeyDesc('file-xfer', 'File transfer method. Can be \"sftp\" (default)", "strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules = set(devlib_excluded_modules) #", "TypedList from lisa.platforms.platinfo import PlatformInfo class PasswordKeyDesc(KeyDesc): def pretty_format(self, v):", "password, assume we can use it as a sudo #", "__getattr__ when looking them up. .. note:: Devlib modules are", "username will root adb upon target connection', [str, None]), PasswordKeyDesc('password',", "relative=relative, name=name, append_time=append_time, symlink=symlink, ) def _get_res_dir(self, root, relative, name,", "connection settings:\\n {settings}') ######################################################################## # Devlib Platform configuration ######################################################################## if", "Content of target_conf.yml: .. literalinclude:: ../target_conf.yml :language: YAML :: TargetConf.from_yaml_map('target_conf.yml')", "\"\"\" Target connection settings. Only keys defined below are allowed,", "if ( getattr(cls, 'name', None) # early modules try to", "it # has been excluded explicitly elif attr in _DEVLIB_AVAILABLE_MODULES:", "without the custom parameters capabilities. 
:return: A connected :class:`Target` \"\"\"", "None else strict_host_check, use_scp=False if use_scp is None else use_scp,", "'stage') != 'early' ) } class TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\" Target", "gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs ) # vim :set tabstop=4 shiftwidth=4 expandtab", "if not self.is_rooted: logger.warning('Could not freeze userspace: target is not", "= True else: out_is_excep = False out = pickle.dumps(out) out_tempfile", "to find \"diod\" on your host machine, check your installation", "Python interpreter on the target .. note:: Closure variables are", "devlib module is available. :returns: ``True`` if module is available,", "custom parameters capabilities. :return: A connected :class:`Target` \"\"\" _, target", "that means \"host\", \"username\" and either \"password\" or \"keyfile\". All", "the Target is # initialized. Expensive computations are deferred so", "file transfer method: {devlib_file_xfer}') use_scp = devlib_file_xfer == 'scp' self._installed_tools", "level of the logs.\") parser.add_argument(\"--res-dir\", \"-o\", help=\"Result directory of the", "too long. 'watchdogd', ] } \"\"\" Dictionary mapping OS name", "to the wrapper using eval() global_vars = { **global_vars, **kwargs,", "plat_info: PlatformInfo = None) -> 'Target': cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs =", "tuple() try: out_tempfiles = (mktemp(), mktemp()) snippet = self._make_remote_snippet( name=name,", "], 'android': [ 'sh', 'adbd', 'usb', 'transport', # We don't", "configuration or any of the connection options is required') if", "after another module # is updated or so. 
We only", "logger.info(f\"Connected to target {(name or '')}\") return target def get_res_dir(self,", "get_subclasses(devlib.module.Module) if ( getattr(cls, 'name', None) # early modules try", "try: return read_output(val) # If the file is empty, we", "frozen, so the cgroup state gets stuck in FREEZING if", "try: self.execute(cmd, **execute_kwargs) except Exception as e: # pylint: disable=broad-except", "Platform, that requires a \"host_output_dir\" # argument computed at runtime.", "to abritrary code execution. \"\"\" conf = TargetConf.from_yaml_map(path) try: plat_info", "the loadable list, it # has been excluded explicitly elif", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "devlib_platform cls.check_init_param(**kwargs) return cls(**kwargs) @classmethod def from_default_conf(cls): \"\"\" Create a", "that lets you disable all idle states \"\"\" logger =", "-> 'Target': cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs = cls.conf_to_init_kwargs(conf) kwargs['res_dir'] = res_dir", "and k not in not_target_conf_opt }) # Some sanity check", "be \"linux\" (ssh) or \"android\" (adb)', [str]), KeyDesc('host', 'Hostname or", ":meth:`freeze_userspace`. \"\"\" CONF_CLASS = TargetConf INIT_KWARGS_KEY_MAP = { 'devlib_excluded_modules': ['devlib',", "collections.abc import Mapping import copy import sys import argparse import", "= plat_info # Create a devlib Platform instance out of", "location and keep # that in _installed_tools, so we are", "context. Also, functions that are referred to will be: *", "all tasks except: {','.join(exclude)}\") try: yield self.cgroups.freeze(exclude) finally: logger.info('Un-freezing userspace", "file except in compliance with the License. 
# You may", "target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) # None as username means adb root will", "produced by any other third-party code LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification',", "reboot will be required if isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager def", "which('diod') if diod_path is None: raise RuntimeError('Failed to find \"diod\"", "'myboard', 'host': 192.0.2.1, 'kind': 'linux', 'username': 'foo', 'password': '<PASSWORD>', }})", "('scp', 'sftp'): raise ValueError(f'Invalid file transfer method: {devlib_file_xfer}') use_scp =", "not os.path.isfile(binary): binary = os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool) return binary", "the Platform allows this YAML configuration # to not use", "workdir=None, device=None, host=None, port=None, username=None, password=<PASSWORD>, keyfile=None, strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[],", "None]), PasswordKeyDesc('password', 'SSH password', [str, None]), KeyDesc('port', 'SSH or ADB", "getattr(self, module) except Exception: # pylint: disable=broad-except return False else:", "just were used to # feed the function to us", "port, username, password, keyfile, strict_host_check, use_scp, devlib_platform, wait_boot, wait_boot_timeout, ):", "of the logs.\") parser.add_argument(\"--res-dir\", \"-o\", help=\"Result directory of the created", "return cls.from_conf(conf=conf, plat_info=plat_info) @classmethod # Keep the signature without *args", "keyword arguments: Forwarded to :meth:`execute` that will spawn the Python", "\"\"\" STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board", "or \"scp\". (Only valid for linux targets)', [TypedList[str]]), )) ))", "is available, ``False`` otherwise. :param module: Devlib module to check.", "actually needed. 
rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo,", ":type module: str .. note:: This will attempt to load", "be frozen, so the cgroup state gets stuck in FREEZING", "# Load the module on demand if attr in self._devlib_loadable_modules:", "command # line arguments try: conf = TargetConf.from_yaml_map(args.conf) except (KeyError,", "# cannot be frozen, so the cgroup state gets stuck", "TargetConf from the file, and update it with command #", "target_conf: parser.error(f'--{required} must be specified') custom_args = { param: value", "Version 2.0 (the \"License\"); you may # not use this", "module if it's not loaded already, and bail out if", "possible conn_settings['adb_as_root'] = (username == 'root') elif kind == 'linux':", "it's usable by exekall def from_cli(cls, argv=None, params=None) -> 'Target':", "updated or so. We only inline local things that are", "**kwargs so that it's usable by exekall def from_cli(cls, argv=None,", "self._get_code(f) def mktemp(): return self.execute( f'mktemp -p {shlex.quote(self.working_directory)}' ).strip() def", "'Target kind. Can be \"linux\" (ssh) or \"android\" (adb)', [str]),", "exception except EOFError: # pylint: disable=raise-missing-from try: excep = read_output(excep)", "you disable all idle states \"\"\" logger = self.get_logger() logger.info('Disabling", "a PlatformInfo YAML description. Note: only load trusted YAML files", ")) DEFAULT_SRC = { 'devlib': { 'platform': { 'class': 'devlib.platform.Platform'", "install_tools(self, tools): \"\"\" Install tools additional to those specified in", "which from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID,", "apparently # cannot be frozen, so the cgroup state gets", "= nullcontext else: controllers = [s.name for s in self.cgroups.list_subsystems()]", "them up. .. 
note:: Devlib modules are loaded on demand", "= ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True) logger.info(f'Effective platform", "results directory :type symlink: bool \"\"\" if isinstance(self._res_dir, ArtifactPath): root", "= name res_dir = res_dir if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME,", "for a given kind of board # (i.e. a Juno", "to provide additional features on top of it. {configurable_params} :param", "special methods like __str__, since the interpreter bypasses __getattr__ when", "{out_tempfiles}[0] with open(out_tempfile, 'wb') as f: f.write(out) ''').format( f=name, code=textwrap.dedent(code_str).replace('\\n',", "composition, as opposed to inheritance. This allows swapping the exact", "is required') if args.kind == 'android': if ('host' not in", ":param name: Name of the results directory :type name: str", "information from the target, after the Target is # initialized.", "the time is used in the name, there is some", "@staticmethod def _get_code(f): lines, _ = inspect.getsourcelines(f) # Remove decorators,", "if name: self.plat_info.add_src('target-conf', dict(name=name)) # Determine file transfer method. Currently", "platform information:\\n{self.plat_info}') @property @memoized def _uses_systemd(self): try: # Check if", "to the underlying :class:`devlib.target.Target`. .. note:: That will not forward", "and username is None: try: target.adb_root(enable=True) except Exception as e:", "be used if this is ``None``. :type argv: list(str) :param", "Returns a directory managed by LISA to store results. Usage", "the current datetime. 
:type append_time: bool :param symlink: Create a", "inside the wrapper's globals so that it can # access", "self.execute_python(f, f_args, f_kwargs, **kwargs) return wrapper return wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform):", "could lead to a # number of problems that could", "the target to finish booting', ( KeyDesc('enable', 'Enable the boot", "import Mapping import copy import sys import argparse import textwrap", "= self._res_dir.relative else: root = self._res_dir relative = '' return", "logger.warning('Could not disable idle states, cpuidle devlib module is not", "attributes plus the ones from the underlying target, and the", "arguments. :param argv: The list of arguments. ``sys.argv[1:]`` will be", "= { 'devlib': { 'platform': { 'class': 'devlib.platform.Platform' } }", "'devlib_file_xfer': ['devlib', 'file-xfer'], 'wait_boot': ['wait-boot', 'enable'], 'wait_boot_timeout': ['wait-boot', 'timeout'], }", "the PlatformInfo so we don't modify the original # one", "functions cannot be called from remote functions') closure_vars = {", "in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with cm() as x: yield x def", "' '.join(map(shlex.quote, cmd)) try: self.execute(cmd, **execute_kwargs) except Exception as e:", "up {kind} target...') # If the target is Android, we", "if required not in target_conf: parser.error(f'--{required} must be specified') custom_args", "KIND, either express or implied. # See the License for", "Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system, simulator, **kwargs): simulator_args = copy.copy(simulator.get('args', []))", "with open(name, 'rb') as f: return pickle.loads(f.read()) def parse_output(paths, err):", "name: !env:str BOARD_NAME port: !env:int PORT .. note:: Only load", "``{param_name: {dict of ArgumentParser.add_argument() options}}``. 
:type params: dict(str, dict) :return:", "if isinstance(mod, ModuleType) ) funcs = {} for f_name, f", "disable_idle_states(self): \"\"\" Context manager that lets you disable all idle", "the class to use', [str]), KeyDesc('args', 'Keyword arguments to build", "cgroups in a systemd # system is pretty bad idea.", "the relevant credentials: $ {script} --kind linux --host 192.0.2.1 --username", "args.kind == 'android': if ('host' not in target_conf) and ('device'", "self.is_module_available('cgroups'): logger.warning('Could not freeze userspace: \"cgroups\" devlib module is necessary')", "don't inline the whole world, which could lead to a", "TargetConf.from_yaml_map(args.conf) except (KeyError, ValueError): pass else: target_conf.add_src(args.conf, conf) target_conf.add_src('command-line', {", "if isinstance(mod, ModuleType) } def can_include(f): return ( isinstance(f, FunctionType)", "val, excep = paths try: return read_output(val) # If the", "!env:int PORT .. note:: Only load trusted YAML files as", "# Create devlib Target object ######################################################################## target = devlib_target_cls( platform=devlib_platform,", "target-conf: # \"!env:<type> ENV_VAR_NAME\" can be used to reference an", "binary arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', [])) simulator_args.extend(( f\"--kernel {system['kernel']}\", f\"--dtb", "= {} resolved_username = username or 'root' logger.debug(f'Setting up {kind}", "otherwise. :param module: Devlib module to check. :type module: str", "from the conf file with contextlib.suppress(KeyError, ValueError): platform_info = PlatformInfo.from_yaml_map(args.conf)", "name will be the current datetime. 
:type append_time: bool :param", "of the tool + install location and keep # that", "4), modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=', '.join( f'{name}={name}' for name in", "# pylint: disable=broad-except logger.warning(f'\"adb root\" failed: {e}') logger.debug(f'Target info: {dict(abi=target.abi,", "python TargetConf({{ 'name': 'myboard', 'host': 192.0.2.1, 'kind': 'linux', 'username': 'foo',", "'wait_boot_timeout': ['wait-boot', 'timeout'], } def __init__(self, kind, name='<noname>', tools=[], res_dir=None,", "top-level `target-conf` key is not needed here: .. code-block:: python", "from the file, and update it with command # line", "plat_info: lisa.platforms.platinfo.PlatformInfo You need to provide the information needed to", "keyfile else: conn_settings['password'] = password elif kind == 'host': devlib_target_cls", "the device if kind == 'android': devlib_target_cls = devlib.AndroidTarget #", ":class:`Target` from a single YAML configuration file. This file will", "KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to speed up the", "hood, and avoids messing up with ``devlib`` internal members. \"\"\"", "append_time: logger.info('Directory already exists, retrying ...') continue else: raise if", "specification', ( KeyDesc('class', 'Name of the class to use', [str]),", "a directory managed by LISA to store results. Usage of", "we # try to freeze it. 
'thermal-engine', # Similar issue", "cls.check_init_param(**kwargs) return cls(**kwargs) @classmethod def from_default_conf(cls): \"\"\" Create a :class:`Target`", "= resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {})) # Hack for Gem5", "to us lines = [ line for line in lines", "it plat_info = copy.copy(plat_info) logger.info(f'User-defined platform information:\\n{plat_info}') self.plat_info = plat_info", "Check if systemd is being used, according to: # https://www.freedesktop.org/software/systemd/man/sd_booted.html", "tasks except: {','.join(exclude)}\") try: yield self.cgroups.freeze(exclude) finally: logger.info('Un-freezing userspace tasks')", "if use_scp is None else use_scp, ) # Configure password", "relocated as the caller wants it res_dir = ArtifactPath(root, os.path.join(relative,", "will be used to provide a :class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo` instances.", "lines, _ = inspect.getsourcelines(f) # Remove decorators, as they are", "None: plat_info = PlatformInfo() else: # Make a copy of", "be specified') custom_args = { param: value for param, value", "!= 'early' ) } class TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\" Target connection", "'devlib_excluded_modules': ['devlib', 'excluded-modules'], 'devlib_file_xfer': ['devlib', 'file-xfer'], 'wait_boot': ['wait-boot', 'enable'], 'wait_boot_timeout':", "target.setup() logger.info(f\"Connected to target {(name or '')}\") return target def", "######################################################################## # Devlib Platform configuration ######################################################################## if not devlib_platform: devlib_platform", "== 'linux': devlib_target_cls = devlib.LinuxTarget conn_settings.update( username=resolved_username, port=port or self.SSH_PORT_DEFAULT,", "not self.is_module_available('cgroups'): logger.warning('Could not freeze userspace: \"cgroups\" devlib 
module is", ":Variable keyword arguments: Forwarded to :meth:`execute` that will spawn the", "simulator_args.extend(system_platform.get('args', [])) simulator_args.extend(( f\"--kernel {system['kernel']}\", f\"--dtb {system['dtb']}\", f\"--disk-image {system['disk']}\" ))", "the original # one we were passed when adding the", "for key, val in conn_settings.items() if key != 'password' )", "'DEFAULT' conn_settings['device'] = device # If the username was explicitly", "name clash risk # 2. we don't inline the whole", "# # Unless required by applicable law or agreed to", "binary tools to deploy if tools: logger.info(f'Tools to install: {tools}')", "err = None return parse_output(out_tempfiles, err) finally: for path in", "sys import argparse import textwrap import functools import inspect import", "\"\"\" Decorates a given function to execute remotely using :meth:`execute_python`::", "options will override what is specified in the file.\" )", "= (f, cls._get_code(f)[1]) for _name, _f in closure_vars.items(): if _f", "Install tools additional to those specified in the test config", "ArtifactPath): root = self._res_dir.root relative = self._res_dir.relative else: root =", "this target, for the benefits of user code. 
:type plat_info:", "but mutating them will not be reflected in the caller's", "RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized from lisa.assets import", "is specified, a default location under $LISA_HOME will be used.\")", "pass elif host: port = port or self.ADB_PORT_DEFAULT device =", "for all domains') for cpu in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with cm()", "= set(tools) - self._installed_tools # TODO: compute the checksum of", "= TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board name, free-form", "result in that structure which would normally be invalid, but", "devlib module is necessary') cm = nullcontext else: controllers =", "connect to the target. For SSH targets that means \"host\",", "v): return '<password>' # Make sure all submodules of devlib.module", "entity, which means the results will be lost in some", "host=host, strict_host_check=True if strict_host_check is None else strict_host_check, use_scp=False if", "CRITICAL_TASKS = { 'linux': [ 'init', # We want to", "Exception as e: # pylint: disable=broad-except cls.get_logger().warning(f'No platform information could", "} funcs[name] = (f, cls._get_code(f)[1]) for _name, _f in closure_vars.items():", "{system['disk']}\" )) diod_path = which('diod') if diod_path is None: raise", "'kind': 'linux', 'username': 'foo', 'password': '<PASSWORD>', }}) Or alternatively, from", "is returned/raised in the host caller. :Variable keyword arguments: Forwarded", "return False else: return True def __getattr__(self, attr): \"\"\" Forward", "fail if the folder already exists. 
In that case, #", "wrapping of :class:`devlib.target.Target` is done using composition, as opposed to", "def _make_remote_snippet(cls, name, code_str, module, kwargs, global_vars, out_tempfiles): # Inject", "loaded already, and bail out if it fails to load.", "since their definition depends on # external callable we cannot", "and the devlib modules that could be loaded on-demand. \"\"\"", "as opposed to inheritance. This allows swapping the exact class", "target. if kind == 'android' and username is None: try:", "foo.yml This will result in that structure which would normally", "and keyword arguments. The return value or any exception is", "implied. # See the License for the specific language governing", "_DEVLIB_AVAILABLE_MODULES: # pylint: disable=raise-missing-from raise AttributeError(f'Devlib target module {attr} was", "the wrapper using eval() global_vars = { **global_vars, **kwargs, }", "'Enable the boot check', [bool]), KeyDesc('timeout', 'Timeout of the boot", "funcs[name] = (f, cls._get_code(f)[1]) for _name, _f in closure_vars.items(): if", ":type append_time: bool :param symlink: Create a symlink named ``results_latest``", "just (eventually) the device if kind == 'android': devlib_target_cls =", "exists, retrying ...') continue else: raise if symlink: res_lnk =", "transfer method. Can be \"sftp\" (default) or \"scp\". (Only valid", "not load cgroups devlib module: target is using systemd, which", "of the host', [str, None]), KeyDesc('username', 'SSH username. On ADB", "task names that we can't afford to freeze when using", "def _get_res_dir(self, root, relative, name, append_time, symlink): logger = self.get_logger()", "names of tools to install :type tools: list(str) \"\"\" def", "death and a machine hard reboot will be required if", "--kind must be set along the relevant credentials: $ {script}", "out_tempfiles = tuple() try: out_tempfiles = (mktemp(), mktemp()) snippet =", "is pretty bad idea. 
if self._uses_systemd: logger.warning('Will not load cgroups", "It will fail if the folder already exists. In that", "= '' return self._get_res_dir( root=root, relative=relative, name=name, append_time=append_time, symlink=symlink, )", "a sudo # password. conn_settings.update( unrooted=password is None, password=password, )", "res_dir=args.res_dir) def _init_target(self, kind, name, workdir, device, host, port, username,", "is not None else ValueError('No exception was raised or value", "KeyDesc('file-xfer', 'File transfer method. Can be \"sftp\" (default) or \"scp\".", "not freeze userspace: freezer cgroup controller not available on the", "os.path.join(ASSETS_PATH, 'binaries', self.abi, tool) if not os.path.isfile(binary): binary = os.path.join(ASSETS_PATH,", "using :meth:`execute_python`:: target = Target(...) @target.remote_func(timeout=42) def foo(x, y): return", "load a PlatformInfo from the conf file with contextlib.suppress(KeyError, ValueError):", "copy import sys import argparse import textwrap import functools import", "module {attr}') self.target.install_module(attr) return get() # If it was not", "Configurable): \"\"\" Wrap :class:`devlib.target.Target` to provide additional features on top", "params.keys()} # Options that are not a key in TargetConf", "virtio # Brackets are there to let the output dir", "# from datetime import datetime import os import os.path import", "self.SSH_PORT_DEFAULT, host=host, strict_host_check=True if strict_host_check is None else strict_host_check, use_scp=False", "means the results will be lost in some automated environment.", "copy.copy(simulator.get('args', [])) system_platform = system['platform'] # Get gem5 binary arguments", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "TargetStableError from devlib.utils.misc import which from devlib.platform.gem5 import Gem5SimulationPlatform from", "get_tags(self): return {'board': self.name} @classmethod def _make_remote_snippet(cls, name, code_str, module,", 
"'password']: if required not in target_conf: parser.error(f'--{required} must be specified')", "as it can lead to abritrary code execution. \"\"\" conf", "requires a \"host_output_dir\" # argument computed at runtime. # Note:", "target as # early as possible conn_settings['adb_as_root'] = (username ==", "platform information to speed up the connection', [bool]), LevelKeyDesc('wait-boot', 'Wait", "= parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\", help=\"The ADB ID of the target.", "be attempted, but failure will # not prevent from connecting", "LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized from lisa.assets import ASSETS_PATH", "the file, and update it with command # line arguments", "['wait-boot', 'enable'], 'wait_boot_timeout': ['wait-boot', 'timeout'], } def __init__(self, kind, name='<noname>',", "of board # (i.e. a Juno board might be named", "PlatformInfo.from_yaml_map(args.conf) # Load the TargetConf from the file, and update", "it is defined in the same module * referred to", "'sh', 'ssh', 'rsyslogd', 'jbd2' ], 'android': [ 'sh', 'adbd', 'usb',", "target-conf: name: myboard \"\"\" STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings',", "file pointed by ``LISA_CONF`` environment variable. .. note:: Only load", "if self._uses_systemd: logger.warning('Will not load cgroups devlib module: target is", "as it can lead to abritrary code execution. .. note::", "] code_str = textwrap.dedent(''.join(lines)) name = f.__name__ return (name, code_str)", "signature without *args and **kwargs so that it's usable by", "x def get_tags(self): return {'board': self.name} @classmethod def _make_remote_snippet(cls, name,", ".. 
note:: Devlib modules are loaded on demand when accessed.", "Actually build the devlib Platform object devlib_platform = devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform']", "Unless required by applicable law or agreed to in writing,", "): \"\"\" Initialize the Target \"\"\" logger = self.get_logger() conn_settings", "pylint: disable=broad-except logger.warning(f'\"adb root\" failed: {e}') logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo,", "exclude = copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do not freeze the process in", "were given an ArtifactPath with an existing root, we #", "keep # that in _installed_tools, so we are sure to", "from devlib.utils.misc import which from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils", "https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d /run/systemd/system/', check_exit_code=True) except TargetStableError: return False else:", "is available. :returns: ``True`` if module is available, ``False`` otherwise.", "{e}') logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup() logger.info(f\"Connected to target", "to speed up the connection', [bool]), LevelKeyDesc('wait-boot', 'Wait for the", "feed the function to us lines = [ line for", "[str]), KeyDesc('args', 'Keyword arguments to build the Platform object', [Mapping]),", "plat_info is None: plat_info = PlatformInfo() else: # Make a", "to create # the link with contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk) return", "pickled back and is returned/raised in the host caller. :Variable", "return False else: return True def is_module_available(self, module): \"\"\" Check", "reserved to interactive use or simple scripts. 
Tests should not", "lisa.platforms.platinfo import PlatformInfo class PasswordKeyDesc(KeyDesc): def pretty_format(self, v): return '<password>'", "in target_conf) and ('device' not in target_conf): parser.error('--host or --device", "the specific language governing permissions and # limitations under the", "another tool trying to create # the link with contextlib.suppress(FileExistsError):", "port = port or self.ADB_PORT_DEFAULT device = f'{host}:{port}' else: device", "'foo', 'password': '<PASSWORD>', }}) Or alternatively, from a YAML configuration", "{ cls.name for cls in get_subclasses(devlib.module.Module) if ( getattr(cls, 'name',", "That structure in a YAML file is allowed and will", "else: conn_settings['password'] = password elif kind == 'host': devlib_target_cls =", "the current datetime will be appended to the given ``name``.", "\"\"\" parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\" Connect to a", "= copy.copy(plat_info) logger.info(f'User-defined platform information:\\n{plat_info}') self.plat_info = plat_info # Take", "target to finish booting', ( KeyDesc('enable', 'Enable the boot check',", "specified, a default location under $LISA_HOME will be used.\") params", "\"\"\" Executes the given Python function ``f`` with the provided", "named ``results_latest`` to the newly created results directory :type symlink:", "f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n' + ' ' * 4), modules=modules, out_tempfiles=repr(out_tempfiles),", "don't modify the original # one we were passed when", "symlink): logger = self.get_logger() while True: time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f') if", "uses cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules # Initialize", "parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\" 
Connect to a target", "return self.execute_python(f, f_args, f_kwargs, **kwargs) return wrapper return wrapper_param class", "try: out_tempfiles = (mktemp(), mktemp()) snippet = self._make_remote_snippet( name=name, code_str=code_str,", "your PATH variable') # Setup virtio # Brackets are there", "import which from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable,", "logs', [str]), KeyDesc('kind', 'Target kind. Can be \"linux\" (ssh) or", "global_vars = { **global_vars, **kwargs, } # Treat the modules", "'name', None) # early modules try to connect to UART", "cmd = ' '.join(map(shlex.quote, cmd)) try: self.execute(cmd, **execute_kwargs) except Exception", "and a machine hard reboot will be required if isinstance(self.target,", "running. 'systemd[^-]', 'dbus', 'sh', 'ssh', 'rsyslogd', 'jbd2' ], 'android': [", "crash if this is frozen # for too long. 'watchdogd',", "1, we don't want to let # sysmted-journald or systemd-timesyncd", "Linux kind.\") parser.add_argument(\"--log-level\", default='info', choices=('warning', 'info', 'debug'), help=\"Verbosity level of", "# If we are given a password, assume we can", "Mapping import copy import sys import argparse import textwrap import", "to *not* load', [TypedList[str]]), KeyDesc('file-xfer', 'File transfer method. Can be", "can point to a YAML target configuration file with all", "add_func(f, f_name) code_str += '\\n' + '\\n'.join(map(itemgetter(1), funcs.values())) non_pickled =", "= textwrap.dedent(''' import pickle import sys def wrapper(): {modules} {code}", "in the caller's context. 
Also, functions that are referred to", "def foo(x, y): return x + y # Execute the", "PlatformInfo = None) -> 'Target': cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs = cls.conf_to_init_kwargs(conf)", "from a module that is installed on the target and", "in global_vars.items() if isinstance(mod, ModuleType) } def can_include(f): return (", "self.execute(cmd, **execute_kwargs) except Exception as e: # pylint: disable=broad-except err", "Platform information attached to this target, for the benefits of", "under $LISA_HOME will be used.\") params = params or {}", "not available on the target') cm = nullcontext else: exclude", "err is not None else ValueError('No exception was raised or", "attr) try: return get() except AttributeError: # Load the module", "devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager def cm(): logger.info(f\"Freezing all tasks except: {','.join(exclude)}\")", "None]), KeyDesc('port', 'SSH or ADB server port', [int, None]), KeyDesc('device',", "the exact class used under the hood, and avoids messing", "self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules # Initialize binary tools to", "vars(args).items() if v is not None and k not in", "[str]), KeyDesc('host', 'Hostname or IP address of the host', [str,", "else: modules = '' script = textwrap.dedent(''' import pickle import", "freeze when using :meth:`freeze_userspace`. 
\"\"\" CONF_CLASS = TargetConf INIT_KWARGS_KEY_MAP =", "according to: # https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d /run/systemd/system/', check_exit_code=True) except TargetStableError:", "in the script if it is defined in the same", "x: yield x @contextlib.contextmanager def disable_idle_states(self): \"\"\" Context manager that", "= ' '.join(map(shlex.quote, cmd)) try: self.execute(cmd, **execute_kwargs) except Exception as", "parser.error(f'--{required} must be specified') custom_args = { param: value for", "device_group.add_argument(\"--host\", \"-n\", help=\"The hostname/IP of the target.\") parser.add_argument(\"--username\", \"-u\", help=\"Login", "will override what is specified in the file.\" ) parser.add_argument(\"--kind\",", "files as it can lead to abritrary code execution. ..", "name: myboard \"\"\" STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', (", "separately as they cannot be pickled modules = { name:", "PlatformInfo YAML description. Note: only load trusted YAML files as", "target_conf: parser.error('--conf with target configuration or any of the connection", "may # not use this file except in compliance with", "devlib_file_xfer == 'scp' self._installed_tools = set() self.target = self._init_target( kind=kind,", "import pickle import tempfile from types import ModuleType, FunctionType from", "if args.conf: # Tentatively load a PlatformInfo from the conf", "will fail if the folder already exists. 
In that case,", "in var_dct.items() } funcs[name] = (f, cls._get_code(f)[1]) for _name, _f", "simulator, **kwargs): simulator_args = copy.copy(simulator.get('args', [])) system_platform = system['platform'] #", "keyfile=None, strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, ): # pylint:", "connections, \"root\" username will root adb upon target connection', [str,", "TargetConf, res_dir: ArtifactPath = None, plat_info: PlatformInfo = None) ->", "long. 'watchdogd', ] } \"\"\" Dictionary mapping OS name to", "is no name clash risk # 2. we don't inline", ":class:`lisa.platforms.platinfo.PlatformInfo` instances. .. note:: Only load trusted YAML files as", "fields are optional if the relevant features aren't needed. ..", "Dictionary of custom parameters to add to the parser. It", "_DEVLIB_AVAILABLE_MODULES = { cls.name for cls in get_subclasses(devlib.module.Module) if (", "This allows swapping the exact class used under the hood,", "what is specified in the file.\" ) parser.add_argument(\"--kind\", \"-k\", choices=[\"android\",", "self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager def freeze_userspace(self): \"\"\" Context manager that lets", "target .. 
note:: Closure variables are supported, but mutating them", "\"\"\" def bin_path(tool): binary = os.path.join(ASSETS_PATH, 'binaries', self.abi, tool) if", "things we are not interested in and getattr(cls, 'stage') !=", "self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True ) self._res_dir =", ")) diod_path = which('diod') if diod_path is None: raise RuntimeError('Failed", "**kwargs): simulator_args = copy.copy(simulator.get('args', [])) system_platform = system['platform'] # Get", "if param in custom_params } custom_args = argparse.Namespace(**custom_args) return custom_args,", "strict_host_check, use_scp, devlib_platform, wait_boot, wait_boot_timeout, ): \"\"\" Initialize the Target", "return parse_output(out_tempfiles, err) finally: for path in out_tempfiles: self.remove(path) def", "of the PlatformInfo so we don't modify the original #", "None return cls.from_conf(conf=conf, plat_info=plat_info) @classmethod # Keep the signature without", ":type tools: list(str) \"\"\" def bin_path(tool): binary = os.path.join(ASSETS_PATH, 'binaries',", "the function') else: raise excep out_tempfiles = tuple() try: out_tempfiles", "in self._devlib_loadable_modules: self.get_logger().info(f'Loading target devlib module {attr}') self.target.install_module(attr) return get()", "key in TargetConf must be listed here not_target_conf_opt = {", "# Compute base installation path logger.info(f'Creating result directory: {res_dir}') #", "code execution. .. note:: That structure in a YAML file", "ADB ID of the target. Superseeds --host. Only applies to", "[TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to speed up", "SPDX-License-Identifier: Apache-2.0 # # Copyright (C) 2018, ARM Limited and", "be the current datetime. 
:type append_time: bool :param symlink: Create", "root=root, relative=relative, name=name, append_time=append_time, symlink=symlink, ) def _get_res_dir(self, root, relative,", "return {'board': self.name} @classmethod def _make_remote_snippet(cls, name, code_str, module, kwargs,", "2018, ARM Limited and contributors. # # Licensed under the", "\"linux\", \"host\"], help=\"The kind of target to connect to.\") device_group", "file. Other options will override what is specified in the", "conn_settings['keyfile'] = keyfile else: conn_settings['password'] = password elif kind ==", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "= self.cpuidle except AttributeError: logger.warning('Could not disable idle states, cpuidle", "class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system, simulator, **kwargs): simulator_args = copy.copy(simulator.get('args',", "res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True )", "code_str = textwrap.dedent(''.join(lines)) name = f.__name__ return (name, code_str) def", "there to let the output dir be created automatically virtio_args", "up. .. note:: Devlib modules are loaded on demand when", "back and is returned/raised in the host caller. :Variable keyword", "KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable from lisa.generic import TypedList from lisa.platforms.platinfo import", "= self.get_logger() logger.info('Disabling idle states for all domains') try: cpuidle", "the caller's context. Also, functions that are referred to will", "``LISA_CONF`` environment variable. .. note:: Only load trusted YAML files", "--host. 
Only applies to Android kind.\") device_group.add_argument(\"--host\", \"-n\", help=\"The hostname/IP", "line arguments try: conf = TargetConf.from_yaml_map(args.conf) except (KeyError, ValueError): pass", "os.path.join(LISA_HOME, LATEST_LINK) with contextlib.suppress(FileNotFoundError): os.remove(res_lnk) # There may be a", "isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager def cm(): logger.info(f\"Freezing all tasks except:", "# Execute the function on the target transparently val =", "it') # Something else that does not exist ... else:", "the file is empty, we probably got an exception except", "to StrictHostKeyChecking option of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target", "in the loadable list, it # has been excluded explicitly", "want to let # sysmted-journald or systemd-timesyncd running. 'systemd[^-]', 'dbus',", "'SSH or ADB server port', [int, None]), KeyDesc('device', 'ADB device.", "not the case, a :exc:`NameError` will be raised. .. attention::", "BOARD_NAME port: !env:int PORT .. note:: Only load trusted YAML", "class PasswordKeyDesc(KeyDesc): def pretty_format(self, v): return '<password>' # Make sure", "later inspection. That board name is mostly free form #", "attributes/method accesses to the underlying :class:`devlib.target.Target`. .. 
note:: That will", "code_str += '\\n' + '\\n'.join(map(itemgetter(1), funcs.values())) non_pickled = set(modules.keys()) |", "Brackets are there to let the output dir be created", "wrapper(): {modules} {code} return {f}({kwargs}) try: out = eval(wrapper.__code__, pickle.loads({globals}))", "root the target as # early as possible conn_settings['adb_as_root'] =", "bin_path(tool): binary = os.path.join(ASSETS_PATH, 'binaries', self.abi, tool) if not os.path.isfile(binary):", "in lines if not line.strip().startswith('@') ] code_str = textwrap.dedent(''.join(lines)) name", "that function is reserved to interactive use or simple scripts.", "is not the case, a :exc:`NameError` will be raised. ..", "This file will be used to provide a :class:`TargetConf` and", "kind.\") device_group.add_argument(\"--host\", \"-n\", help=\"The hostname/IP of the target.\") parser.add_argument(\"--username\", \"-u\",", "in scope. If that is not the case, a :exc:`NameError`", "by exekall def from_cli(cls, argv=None, params=None) -> 'Target': \"\"\" Same", "and it's restricted to the wrapper using eval() global_vars =", "submodules of devlib.module are imported so the classes # are", "out_is_excep else {out_tempfiles}[0] with open(out_tempfile, 'wb') as f: f.write(out) ''').format(", "used under the hood, and avoids messing up with ``devlib``", "not line.strip().startswith('@') ] code_str = textwrap.dedent(''.join(lines)) name = f.__name__ return", "spawn the Python interpreter on the target .. note:: Closure", "append_time, symlink): logger = self.get_logger() while True: time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f')", "YAML description. 
Note: only load trusted YAML files as it", "devlib_excluded_modules # Initialize binary tools to deploy if tools: logger.info(f'Tools", "return target def get_res_dir(self, name=None, append_time=True, symlink=True): \"\"\" Returns a", "target source to it plat_info = copy.copy(plat_info) logger.info(f'User-defined platform information:\\n{plat_info}')", "finally: for path in out_tempfiles: self.remove(path) def remote_func(self, **kwargs): \"\"\"", "would normally be invalid, but is handled as a special", ":return: A tuple ``(args, target)`` .. note:: This method should", "symlink: Create a symlink named ``results_latest`` to the newly created", "sig = inspect.signature(f) kwargs = sig.bind(*args, **kwargs).arguments closure_vars = inspect.getclosurevars(f)", "[bool, None]), KeyDesc('workdir', 'Remote target workdir', [str]), KeyDesc('tools', 'List of", "command line gem5_args = ' '.join(shlex.quote(a) for a in simulator_args)", "given devlib module is available. :returns: ``True`` if module is", "{ 'class': 'devlib.platform.Platform' } } } class Target(Loggable, HideExekallID, ExekallTaggable,", "CONF_CLASS = TargetConf INIT_KWARGS_KEY_MAP = { 'devlib_excluded_modules': ['devlib', 'excluded-modules'], 'devlib_file_xfer':", "not in target_conf: parser.error(f'--{required} must be specified') custom_args = {", "= [s.name for s in self.cgroups.list_subsystems()] if 'freezer' not in", "field :param tools: The list of names of tools to", "to inheritance. 
This allows swapping the exact class used under", "if not name: name = time_str elif append_time: name =", "You may obtain a copy of the License at #", "we don't modify the original # one we were passed", "return {f}({kwargs}) try: out = eval(wrapper.__code__, pickle.loads({globals})) except BaseException as", "Currently avaliable options # are 'sftp' and 'scp', defaults to", "self._init_target( kind=kind, name=name, workdir=workdir, device=device, host=host, port=port, username=username, password=password, keyfile=keyfile,", "device=device, host=host, port=port, username=username, password=password, keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot,", "cls.from_one_conf(path) @classmethod def from_one_conf(cls, path): \"\"\" Create a :class:`Target` from", "Exception as e: # pylint: disable=broad-except err = e else:", "Python function ``f`` with the provided positional and keyword arguments.", "Can be \"linux\" (ssh) or \"android\" (adb)', [str]), KeyDesc('host', 'Hostname", "targets that means \"host\", \"username\" and either \"password\" or \"keyfile\".", "or self.ADB_PORT_DEFAULT device = f'{host}:{port}' else: device = 'DEFAULT' conn_settings['device']", ") # Configure password or SSH keyfile if keyfile: conn_settings['keyfile']", "str :param append_time: If True, the current datetime will be", "even read the exception, raise the initial one # from", ":type devlib_platform: devlib.platform.Platform :param plat_info: Platform information attached to this", "x @contextlib.contextmanager def disable_idle_states(self): \"\"\" Context manager that lets you", "the wrapper's globals so that it can # access them.", "PlatformInfo class PasswordKeyDesc(KeyDesc): def pretty_format(self, v): return '<password>' # Make", "my_target.yml Alternatively, --kind must be set along the relevant credentials:", "so we don't modify the original # one we were", "or \"android\" (adb)', [str]), KeyDesc('host', 
'Hostname or IP address of", "here not_target_conf_opt = { 'platform_info', 'log_level', 'res_dir', 'conf', } not_target_conf_opt.update(custom_params)", "* file bar.yml:: target-conf: !include foo.yml This will result in", "scripts, it's more designed for quick scripting. \"\"\" parser =", "the modules separately as they cannot be pickled modules =", "scripting. \"\"\" parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\" Connect to", "non_pickled = set(modules.keys()) | set(funcs.keys()) global_vars = { name: val", "'rsyslogd', 'jbd2' ], 'android': [ 'sh', 'adbd', 'usb', 'transport', #", "global_vars, out_tempfiles): # Inject the parameters inside the wrapper's globals", "return target @classmethod def from_custom_cli(cls, argv=None, params=None): \"\"\" Create a", "in the form of ``{param_name: {dict of ArgumentParser.add_argument() options}}``. :type", "conn_settings.update( username=resolved_username, port=port or self.SSH_PORT_DEFAULT, host=host, strict_host_check=True if strict_host_check is", "When that happens, a warning is logged but no exception", "= self.get_logger() if not self.is_rooted: logger.warning('Could not freeze userspace: target", "with the provided positional and keyword arguments. 
The return value", "value for param, value in vars(args).items() if param in custom_params", "if systemd is being used, according to: # https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test", "any exception is pickled back and is returned/raised in the", "If the target is Android, we need just (eventually) the", "devlib.platform.Platform() ######################################################################## # Create devlib Target object ######################################################################## target =", "not in controllers: logger.warning('Could not freeze userspace: freezer cgroup controller", "out_is_excep = True else: out_is_excep = False out = pickle.dumps(out)", "execution. \"\"\".format( script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\", '-c', help=\"Path to a TargetConf", "attention:: Decorators are ignored and not applied. \"\"\" sig =", "= devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] = devlib_platform cls.check_init_param(**kwargs) return cls(**kwargs) @classmethod def", "devlib module {attr}') self.target.install_module(attr) return get() # If it was", "# https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d /run/systemd/system/', check_exit_code=True) except TargetStableError: return False", "by ``LISA_CONF`` environment variable. .. note:: Only load trusted YAML", "object', [Mapping]), )), KeyDesc('excluded-modules', 'List of devlib modules to *not*", "of the original one to benefit from mapping configuration if", "used to provide a :class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo` instances. .. 
note::", "domains') try: cpuidle = self.cpuidle except AttributeError: logger.warning('Could not disable", "# Some sanity check to get better error messages if", "!include foo.yml This will result in that structure which would", "already exists, retrying ...') continue else: raise if symlink: res_lnk", "def _uses_systemd(self): try: # Check if systemd is being used,", "\"\"\" conf = TargetConf.from_yaml_map(path) try: plat_info = PlatformInfo.from_yaml_map(path) except Exception", "if args.kind == 'android': if ('host' not in target_conf) and", "file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH',", "in closure_vars.items(): if _f is not f and can_include(_f): add_func(_f,", "'SSH password', [str, None]), KeyDesc('port', 'SSH or ADB server port',", "it as a sudo # password. conn_settings.update( unrooted=password is None,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "for name, val in global_vars.items() if name not in non_pickled", "If no directory is specified, a default location under $LISA_HOME", "from freezing anything. When that happens, a warning is logged", "(name, mod) for name, mod in closure_vars.items() if isinstance(mod, ModuleType)", "os.makedirs(res_dir) break except FileExistsError: # If the time is used", "...') continue else: raise if symlink: res_lnk = os.path.join(LISA_HOME, LATEST_LINK)", "this is frozen # for too long. 'watchdogd', ] }", "KeyDesc('kind', 'Target kind. 
Can be \"linux\" (ssh) or \"android\" (adb)',", "SSH targets that means \"host\", \"username\" and either \"password\" or", "file with all the necessary connection information: $ {script} --conf", "necessary connection information: $ {script} --conf my_target.yml Alternatively, --kind must", "'\\n'.join(map(itemgetter(1), funcs.values())) non_pickled = set(modules.keys()) | set(funcs.keys()) global_vars = {", "non_pickled } if modules: modules = f\"import {', '.join(sorted(modules))}\" else:", "tools): \"\"\" Install tools additional to those specified in the", "the newly created results directory :type symlink: bool \"\"\" if", "information needed to connect to the target. For SSH targets", "logger.info(f'Creating result directory: {res_dir}') # It will fail if the", "from lisa.generic import TypedList from lisa.platforms.platinfo import PlatformInfo class PasswordKeyDesc(KeyDesc):", "by any other third-party code LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', (", "here: .. code-block:: python TargetConf({{ 'name': 'myboard', 'host': 192.0.2.1, 'kind':", "e: # pylint: disable=broad-except logger.warning(f'\"adb root\" failed: {e}') logger.debug(f'Target info:", "devlib.exception import TargetStableError from devlib.utils.misc import which from devlib.platform.gem5 import", "or your PATH variable') # Setup virtio # Brackets are", "this module is in scope. If that is not the", "# pylint: disable=broad-except cls.get_logger().warning(f'No platform information could be found: {e}')", "in # direct control f.__module__ == module ) def add_func(f,", "and do very # platform-specific things we are not interested", "# limitations under the License. # from datetime import datetime", "for too long. 'watchdogd', ] } \"\"\" Dictionary mapping OS", "{ k: v for k, v in vars(args).items() if v", "devlib module') try: getattr(self, module) except Exception: # pylint: disable=broad-except", "to interactive use or simple scripts. 
Tests should not rely", "os.path import contextlib import shlex from collections.abc import Mapping import", "'dbus', 'sh', 'ssh', 'rsyslogd', 'jbd2' ], 'android': [ 'sh', 'adbd',", "pickle import tempfile from types import ModuleType, FunctionType from operator", "passed when adding the target source to it plat_info =", "self._uses_systemd: logger.warning('Will not load cgroups devlib module: target is using", "'scp' self._installed_tools = set() self.target = self._init_target( kind=kind, name=name, workdir=workdir,", "LISA to store results. Usage of that function is reserved", "raised, so it's a best-effort approach. \"\"\" logger = self.get_logger()", "connection', [str, None]), PasswordKeyDesc('password', 'SSH password', [str, None]), KeyDesc('port', 'SSH", "{','.join(exclude)}\") try: yield self.cgroups.freeze(exclude) finally: logger.info('Un-freezing userspace tasks') self.cgroups.freeze(thaw=True) with", "an exception except EOFError: # pylint: disable=raise-missing-from try: excep =", "not in ('scp', 'sftp'): raise ValueError(f'Invalid file transfer method: {devlib_file_xfer}')", "the case, a :exc:`NameError` will be raised. .. attention:: Decorators", "your installation or your PATH variable') # Setup virtio #", "not None and k not in not_target_conf_opt }) # Some", "will result in that structure which would normally be invalid,", "it can be relocated as the caller wants it res_dir", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "system is pretty bad idea. if self._uses_systemd: logger.warning('Will not load", "load. \"\"\" if module not in _DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\" is", "except EOFError: raise err if err is not None else", "are deferred so they will only be # computed when", "[bool]), LevelKeyDesc('wait-boot', 'Wait for the target to finish booting', (", "License. 
# You may obtain a copy of the License", "a given function to execute remotely using :meth:`execute_python`:: target =", "inheritance. This allows swapping the exact class used under the", "``sys.argv[1:]`` will be used if this is ``None``. :type argv:", "kind == 'host': devlib_target_cls = devlib.LocalLinuxTarget # If we are", "will not be tracked by any external entity, which means", "remote_func(self, **kwargs): \"\"\" Decorates a given function to execute remotely", "try: return get() except AttributeError: # Load the module on", "# Take the board name from the target configuration so", "= self._get_code(f) def mktemp(): return self.execute( f'mktemp -p {shlex.quote(self.working_directory)}' ).strip()", "username. Only applies to Linux kind.\") parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login password.", "machine, check your installation or your PATH variable') # Setup", "False else: return True def is_module_available(self, module): \"\"\" Check if", "{} for f_name, f in global_vars.items(): if can_include(f): add_func(f, f_name)", "over \"host\"', [str, None]), KeyDesc('keyfile', 'SSH private key file', [str,", "not freeze the process in charge of de-freezing, otherwise we", "logger.info('Un-freezing userspace tasks') self.cgroups.freeze(thaw=True) with cm() as x: yield x", "password or SSH keyfile if keyfile: conn_settings['keyfile'] = keyfile else:", "import tempfile from types import ModuleType, FunctionType from operator import", "used.\") params = params or {} for param, settings in", "settings = '\\n '.join( f' {key}: {val}' for key, val", "to :meth:`execute` that will spawn the Python interpreter on the", "FREEZING if we # try to freeze it. 'thermal-engine', #", "prevent from freezing anything. When that happens, a warning is", "pointed by ``LISA_CONF`` environment variable. .. 
note:: Only load trusted", "KeyDesc('timeout', 'Timeout of the boot check', [int]), )), LevelKeyDesc('devlib', 'devlib", "modules.update( (name, mod) for name, mod in closure_vars.items() if isinstance(mod,", "self.plat_info = plat_info # Take the board name from the", "install: {tools}') self.install_tools(tools) # Autodetect information from the target, after", "LATEST_LINK) with contextlib.suppress(FileNotFoundError): os.remove(res_lnk) # There may be a race", "file will be used to provide a :class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo`", "can_include(_f): add_func(_f, _name) modules.update( (name, mod) for name, mod in", "target configuration or any of the connection options is required')", "\"sftp\" (default) or \"scp\". (Only valid for linux targets)', [TypedList[str]]),", "= datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not name: name = time_str elif append_time:", "upon target connection', [str, None]), PasswordKeyDesc('password', 'SSH password', [str, None]),", "demand when accessed. 
\"\"\" def get(): return getattr(self.target, attr) try:", "case, # append_time should be used to ensure we get", "it will succeed if append_time: logger.info('Directory already exists, retrying ...')", "sig.bind(*args, **kwargs).arguments closure_vars = inspect.getclosurevars(f) name, code_str = self._get_code(f) def", "--conf my_target.yml Alternatively, --kind must be set along the relevant", "module on demand if attr in self._devlib_loadable_modules: self.get_logger().info(f'Loading target devlib", "platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings, working_directory=workdir, connect=False, ) target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) # None", "} } class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): \"\"\" Wrap :class:`devlib.target.Target`", "\"android\" (adb)', [str]), KeyDesc('host', 'Hostname or IP address of the", "} def __init__(self, kind, name='<noname>', tools=[], res_dir=None, plat_info=None, lazy_platinfo=False, workdir=None,", "an existing root, we # preserve that root so it", "file: Content of target_conf.yml: .. literalinclude:: ../target_conf.yml :language: YAML ::", "that does not exist ... else: raise def __dir__(self): \"\"\"", "as f: return pickle.loads(f.read()) def parse_output(paths, err): val, excep =", "those specified in the test config 'tools' field :param tools:", "reference an # environment variable. name: !env:str BOARD_NAME port: !env:int", "else that does not exist ... 
else: raise def __dir__(self):", "# that in _installed_tools, so we are sure to be", "one # from devlib except EOFError: raise err if err", "= self._make_remote_snippet( name=name, code_str=code_str, module=f.__module__, kwargs=kwargs, global_vars={ **closure_vars.globals, **closure_vars.nonlocals, },", "if devlib_file_xfer and devlib_file_xfer not in ('scp', 'sftp'): raise ValueError(f'Invalid", "else: raise def __dir__(self): \"\"\" List our attributes plus the", "if module is available, ``False`` otherwise. :param module: Devlib module", "rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True) logger.info(f'Effective", "hope that the # next time it will succeed if", "using systemd, which already uses cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES", "not be relied upon to implement long-term scripts, it's more", "a :class:`Target` from a single YAML configuration file. This file", "used in the name, there is some hope that the", "execution. \"\"\" path = os.environ['LISA_CONF'] return cls.from_one_conf(path) @classmethod def from_one_conf(cls,", "'res_dir', 'conf', } not_target_conf_opt.update(custom_params) args = parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf =", "be specified') if args.kind == 'linux': for required in ['host',", "append_time=True, symlink=True): \"\"\" Returns a directory managed by LISA to", "= None return cls.from_conf(conf=conf, plat_info=plat_info) @classmethod # Keep the signature", "conn_settings['device'] = device # If the username was explicitly set", "= self.get_logger() self.name = name res_dir = res_dir if res_dir", "try: plat_info = PlatformInfo.from_yaml_map(path) except Exception as e: # pylint:", "created Target. 
If no directory is specified, a default location", "On ADB connections, \"root\" username will root adb upon target", "username=username, password=password, keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules", "to by name, assuming it comes from a module that", "HiKey960, the board will crash if this is frozen #", "######################################################################## if not devlib_platform: devlib_platform = devlib.platform.Platform() ######################################################################## # Create", "append_time: If True, the current datetime will be appended to", "is None: raise RuntimeError('Failed to find \"diod\" on your host", "in FREEZING if we # try to freeze it. 'thermal-engine',", "[bool]), KeyDesc('timeout', 'Timeout of the boot check', [int]), )), LevelKeyDesc('devlib',", "var_dct in inspect.getclosurevars(f) if isinstance(var_dct, Mapping) for name, val in", "operator import itemgetter import devlib from devlib.exception import TargetStableError from", "TopLevelKeyDesc,Configurable from lisa.generic import TypedList from lisa.platforms.platinfo import PlatformInfo class", "should be expected for a given kind of board #", "== 'scp' self._installed_tools = set() self.target = self._init_target( kind=kind, name=name,", "tools: logger.info(f'Tools to install: {tools}') self.install_tools(tools) # Autodetect information from", "wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system, simulator, **kwargs): simulator_args =", ") funcs = {} for f_name, f in global_vars.items(): if", "sure to be correct for tool in map(bin_path, tools): self.target.install(tool)", "list them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES = { cls.name for cls in", "the same module * referred to by name, assuming it", "under the Apache License, 
Version 2.0 (the \"License\"); you may", "invalid, but is handled as a special case:: target-conf: target-conf:", "current datetime will be appended to the given ``name``. If", "use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules = set(devlib_excluded_modules) # Sorry,", "from the target, after the Target is # initialized. Expensive", "kwargs = cls.conf_to_init_kwargs(conf) kwargs['res_dir'] = res_dir kwargs['plat_info'] = plat_info #", "the target as # early as possible conn_settings['adb_as_root'] = (username", "warning is logged but no exception is raised, so it's", "{ name: val for var_dct in inspect.getclosurevars(f) if isinstance(var_dct, Mapping)", "that in _installed_tools, so we are sure to be correct", "not be reflected in the caller's context. Also, functions that", "get() # If it was not in the loadable list,", "target_conf.add_src(args.conf, conf) target_conf.add_src('command-line', { k: v for k, v in", "'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True) logger.info(f'Effective platform information:\\n{self.plat_info}') @property", "isinstance(f, FunctionType) and # Only allow inlining of functions defined", "else: err = None return parse_output(out_tempfiles, err) finally: for path", "which means the results will be lost in some automated", "to not use any python-specific YAML tags, so TargetConf files", "pylint: disable=broad-except err = e else: err = None return", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "import TypedList from lisa.platforms.platinfo import PlatformInfo class PasswordKeyDesc(KeyDesc): def pretty_format(self,", "logger = self.get_logger() self.name = name res_dir = res_dir if", "**global_vars, **kwargs, } # Treat the modules separately as they", "the target. Superseeds --host. 
Only applies to Android kind.\") device_group.add_argument(\"--host\",", "name=name, workdir=workdir, device=device, host=host, port=port, username=username, password=password, keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp,", "target.\") parser.add_argument(\"--username\", \"-u\", help=\"Login username. Only applies to Linux kind.\")", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "of the connection options is required') if args.kind == 'android':", "when actually needed. rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir,", "module to check. :type module: str .. note:: This will", "} not_target_conf_opt.update(custom_params) args = parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf = TargetConf() platform_info", "retrying ...') continue else: raise if symlink: res_lnk = os.path.join(LISA_HOME,", "referred to will be: * bundled in the script if", "<PASSWORD>\", help=\"Login password. Only applies to Linux kind.\") parser.add_argument(\"--log-level\", default='info',", "= '\\n '.join( f' {key}: {val}' for key, val in", "language governing permissions and # limitations under the License. #", "= self._res_dir relative = '' return self._get_res_dir( root=root, relative=relative, name=name,", ") return script @staticmethod def _get_code(f): lines, _ = inspect.getsourcelines(f)", "funcs.values())) non_pickled = set(modules.keys()) | set(funcs.keys()) global_vars = { name:", ".. 
note:: Closure variables are supported, but mutating them will", "required by applicable law or agreed to in writing, software", "better error messages if 'kind' not in target_conf: parser.error('--conf with", "'binaries', self.abi, tool) if not os.path.isfile(binary): binary = os.path.join(ASSETS_PATH, 'binaries',", "err): val, excep = paths try: return read_output(val) # If", "raise ValueError(f'res_dir must be empty: {self._res_dir}') if plat_info is None:", "closure_vars.items() if isinstance(mod, ModuleType) ) funcs = {} for f_name,", "the License. # from datetime import datetime import os import", "a single YAML configuration file. This file will be used", "'log_level', 'res_dir', 'conf', } not_target_conf_opt.update(custom_params) args = parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf", "are imported so the classes # are all created before", "to connect to.\") device_group = parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\", help=\"The ADB", "= device # If the username was explicitly set to", "f'--which-diod={diod_path}', '--workload-automation-vio={}', ] simulator_args.extend(virtio_args) # Quote/escape arguments and build the", "devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules = set(devlib_excluded_modules) # Sorry, can't", "of that function is reserved to interactive use or simple", "elif not self.is_module_available('cgroups'): logger.warning('Could not freeze userspace: \"cgroups\" devlib module", "will # not prevent from connecting to the target. 
if", "\"\"\" Wrap :class:`devlib.target.Target` to provide additional features on top of", "[str, None]), KeyDesc('port', 'SSH or ADB server port', [int, None]),", "exclude.append(str(os.getpid())) @contextlib.contextmanager def cm(): logger.info(f\"Freezing all tasks except: {','.join(exclude)}\") try:", "'scripts', tool) return binary tools = set(tools) - self._installed_tools #", "host, port, username, password, keyfile, strict_host_check, use_scp, devlib_platform, wait_boot, wait_boot_timeout,", "plat_info: Platform information attached to this target, for the benefits", "dict(str, dict) :return: A tuple ``(args, target)`` .. note:: This", "\"\"\" Connect to a target using the provided configuration in", "import functools import inspect import pickle import tempfile from types", "logger = self.get_logger() conn_settings = {} resolved_username = username or", "set(modules.keys()) | set(funcs.keys()) global_vars = { name: val for name,", "Decorators are ignored and not applied. \"\"\" sig = inspect.signature(f)", "-p {shlex.quote(self.working_directory)}' ).strip() def read_output(path): with tempfile.TemporaryDirectory() as d: name", "but failure will # not prevent from connecting to the", "of :class:`devlib.platform.Platform` to use to build the :class:`devlib.target.Target` :type devlib_platform:", "can't let you do that. Messing with cgroups in a", "agreed to in writing, software # distributed under the License", "Platform instance out of the configuration file devlib_platform_conf = conf['devlib']['platform']", "source to it plat_info = copy.copy(plat_info) logger.info(f'User-defined platform information:\\n{plat_info}') self.plat_info", "configuration file devlib_platform_conf = conf['devlib']['platform'] devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs =", "name, free-form value only used to embelish logs', [str]), KeyDesc('kind',", "a TargetConf and PlatformInfo yaml file. 
Other options will override", "idle states for all domains') try: cpuidle = self.cpuidle except", "return ( isinstance(f, FunctionType) and # Only allow inlining of", "v is not None and k not in not_target_conf_opt })", "distributed under the License is distributed on an \"AS IS\"", "'<password>' # Make sure all submodules of devlib.module are imported", "out = eval(wrapper.__code__, pickle.loads({globals})) except BaseException as e: out =", "self._make_remote_snippet( name=name, code_str=code_str, module=f.__module__, kwargs=kwargs, global_vars={ **closure_vars.globals, **closure_vars.nonlocals, }, out_tempfiles=out_tempfiles", "file. This file will be used to provide a :class:`TargetConf`", "devlib_target_cls = devlib.AndroidTarget # Workaround for ARM-software/devlib#225 workdir = workdir", "not a key in TargetConf must be listed here not_target_conf_opt", "this YAML configuration # to not use any python-specific YAML", "a machine hard reboot will be required if isinstance(self.target, devlib.LocalLinuxTarget):", "normally be invalid, but is handled as a special case::", "of :class:`devlib.target.Target` is done using composition, as opposed to inheritance.", "bundled in the script if it is defined in the", "= 5555 SSH_PORT_DEFAULT = 22 CRITICAL_TASKS = { 'linux': [", "devlib_file_xfer and devlib_file_xfer not in ('scp', 'sftp'): raise ValueError(f'Invalid file", "to the target. For SSH targets that means \"host\", \"username\"", "# of the original one to benefit from mapping configuration", "Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME,", "'jbd2' ], 'android': [ 'sh', 'adbd', 'usb', 'transport', # We", "given function to execute remotely using :meth:`execute_python`:: target = Target(...)", "needed. .. 
note:: The wrapping of :class:`devlib.target.Target` is done using", "OS name to list of task names that we can't", "board name is mostly free form # and no specific", "cannot be pickled modules = { name: mod for name,", "get() except AttributeError: # Load the module on demand if", "installation or your PATH variable') # Setup virtio # Brackets", "and either \"password\" or \"keyfile\". All other fields are optional", "import PlatformInfo class PasswordKeyDesc(KeyDesc): def pretty_format(self, v): return '<password>' #", ") else: raise ValueError(f'Unsupported platform type {kind}') settings = '\\n", "[])) system_platform = system['platform'] # Get gem5 binary arguments simulator_args.append('--listener-mode=on')", "be used to ensure we get a unique name. try:", "else: raise if symlink: res_lnk = os.path.join(LISA_HOME, LATEST_LINK) with contextlib.suppress(FileNotFoundError):", "tools=[], res_dir=None, plat_info=None, lazy_platinfo=False, workdir=None, device=None, host=None, port=None, username=None, password=<PASSWORD>,", "self.abi, tool) if not os.path.isfile(binary): binary = os.path.join(ASSETS_PATH, 'binaries', 'scripts',", "RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True ) self._res_dir = res_dir os.makedirs(self._res_dir,", "keyfile, strict_host_check, use_scp, devlib_platform, wait_boot, wait_boot_timeout, ): \"\"\" Initialize the", "name, val in global_vars.items() if name not in non_pickled }", "charge of de-freezing, otherwise we # will freeze to death", "/run/systemd/system/', check_exit_code=True) except TargetStableError: return False else: return True def", "{} resolved_username = username or 'root' logger.debug(f'Setting up {kind} target...')", "= res_dir kwargs['plat_info'] = plat_info # Create a devlib Platform", "bad idea. if self._uses_systemd: logger.warning('Will not load cgroups devlib module:", "be \"sftp\" (default) or \"scp\". 
(Only valid for linux targets)',", "**kwargs) return wrapper return wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system,", "\"\"\" logger = self.get_logger() logger.info('Disabling idle states for all domains')", "\"username\" and either \"password\" or \"keyfile\". All other fields are", "platform_info = None if args.conf: # Tentatively load a PlatformInfo", "means adb root will be attempted, but failure will #", "or simple scripts. Tests should not rely on that as", "and keep # that in _installed_tools, so we are sure", "userspace: \"cgroups\" devlib module is necessary') cm = nullcontext else:", "= f\"import {', '.join(sorted(modules))}\" else: modules = '' script =", "for param, settings in params.items(): parser.add_argument(f'--{param}', **settings) custom_params = {k.replace('-',", "object devlib_platform = devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] = devlib_platform cls.check_init_param(**kwargs) return cls(**kwargs)", "if 'freezer' not in controllers: logger.warning('Could not freeze userspace: freezer", "to connect to the target. For SSH targets that means", "except: {','.join(exclude)}\") try: yield self.cgroups.freeze(exclude) finally: logger.info('Un-freezing userspace tasks') self.cgroups.freeze(thaw=True)", "TargetConf() platform_info = None if args.conf: # Tentatively load a", "Platform allows this YAML configuration # to not use any", "freezing anything. When that happens, a warning is logged but", "'wait_boot': ['wait-boot', 'enable'], 'wait_boot_timeout': ['wait-boot', 'timeout'], } def __init__(self, kind,", "else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True ) self._res_dir", "# environment variable. name: !env:str BOARD_NAME port: !env:int PORT ..", "them will not be reflected in the caller's context. 
Also,", "os import os.path import contextlib import shlex from collections.abc import", "provide additional features on top of it. {configurable_params} :param devlib_platform:", "port=None, username=None, password=<PASSWORD>, keyfile=None, strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10,", "decorated functions since their definition depends on # external callable", "return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir) def _init_target(self, kind, name, workdir,", "for k, v in vars(args).items() if v is not None", "using the provided configuration in order to run a test.", "[TypedList[str]]), )) )) DEFAULT_SRC = { 'devlib': { 'platform': {", "'-c', snippet] cmd = ' '.join(map(shlex.quote, cmd)) try: self.execute(cmd, **execute_kwargs)", "is specified in the file.\" ) parser.add_argument(\"--kind\", \"-k\", choices=[\"android\", \"linux\",", "so the classes # are all created before we list", "parsed and produced by any other third-party code LevelKeyDesc('platform', 'devlib.platform.Platform", "issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir) # Actually build the devlib Platform", "board # (i.e. a Juno board might be named \"foo-bar-juno-on-my-desk\")", "Target is # initialized. Expensive computations are deferred so they", "to reference an # environment variable. name: !env:str BOARD_NAME port:", "order to run a test. 
EXAMPLES --conf can point to", "stuck in FREEZING if we # try to freeze it.", "and build the command line gem5_args = ' '.join(shlex.quote(a) for", "to connect to UART and do very # platform-specific things", "name, val in var_dct.items() } funcs[name] = (f, cls._get_code(f)[1]) for", "private key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option", "this task but on Google Pixel it apparently # cannot", "keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules = set(devlib_excluded_modules)", "are all created before we list them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES =", "world, which could lead to a # number of problems", "implement long-term scripts, it's more designed for quick scripting. \"\"\"", "not name: name = time_str elif append_time: name = f\"{name}-{time_str}\"", "This will result in that structure which would normally be", "self.get_logger() logger.info('Disabling idle states for all domains') try: cpuidle =", "} \"\"\" Dictionary mapping OS name to list of task", "{ 'devlib_excluded_modules': ['devlib', 'excluded-modules'], 'devlib_file_xfer': ['devlib', 'file-xfer'], 'wait_boot': ['wait-boot', 'enable'],", "direct control f.__module__ == module ) def add_func(f, name): #", "= TargetConf() platform_info = None if args.conf: # Tentatively load", "(i.e. a Juno board might be named \"foo-bar-juno-on-my-desk\") if name:", "get_res_dir(self, name=None, append_time=True, symlink=True): \"\"\" Returns a directory managed by", "kind, name, workdir, device, host, port, username, password, keyfile, strict_host_check,", "( # Using textual name of the Platform allows this", "\"linux\" (ssh) or \"android\" (adb)', [str]), KeyDesc('host', 'Hostname or IP", "to load. 
\"\"\" if module not in _DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\"", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "def install_tools(self, tools): \"\"\" Install tools additional to those specified", "def _get_code(f): lines, _ = inspect.getsourcelines(f) # Remove decorators, as", "copy.copy(plat_info) logger.info(f'User-defined platform information:\\n{plat_info}') self.plat_info = plat_info # Take the", "so. We only inline local things that are in #", "Mapping) for name, val in var_dct.items() } funcs[name] = (f,", "raise ValueError(f'Invalid file transfer method: {devlib_file_xfer}') use_scp = devlib_file_xfer ==", "<PASSWORD> In both cases, --conf can also contain a PlatformInfo", "to a TargetConf and PlatformInfo yaml file. Other options will", "correct for tool in map(bin_path, tools): self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager def", "return (name, code_str) def execute_python(self, f, args, kwargs, **execute_kwargs): \"\"\"", "try: # Check if systemd is being used, according to:", "be used instead # of the original one to benefit", "None: try: target.adb_root(enable=True) except Exception as e: # pylint: disable=broad-except", "# Only allow inlining of functions defined in the same", "== module ) def add_func(f, name): # Disallow decorated functions", "os.path.join(relative, name)) # Compute base installation path logger.info(f'Creating result directory:", "very # platform-specific things we are not interested in and", "OR CONDITIONS OF ANY KIND, either express or implied. #", "logger.debug(f'{kind} {name} target connection settings:\\n {settings}') ######################################################################## # Devlib Platform", "or systemd-timesyncd running. 
'systemd[^-]', 'dbus', 'sh', 'ssh', 'rsyslogd', 'jbd2' ],", "the process in charge of de-freezing, otherwise we # will", "def read_output(path): with tempfile.TemporaryDirectory() as d: name = os.path.join(d, 'out')", "as they would shadow any global name # anyway, and", "with command # line arguments try: conf = TargetConf.from_yaml_map(args.conf) except", "or so. We only inline local things that are in", "pickle.loads(f.read()) def parse_output(paths, err): val, excep = paths try: return", "def from_custom_cli(cls, argv=None, params=None): \"\"\" Create a Target from command", "available on the target') cm = nullcontext else: exclude =", "some automated environment. :param name: Name of the results directory", "info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup() logger.info(f\"Connected to target {(name or", "in order to run a test. EXAMPLES --conf can point", "username=None, password=<PASSWORD>, keyfile=None, strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, ):", "parser.add_argument(\"--kind\", \"-k\", choices=[\"android\", \"linux\", \"host\"], help=\"The kind of target to", "raised or value returned by the function') else: raise excep", "elif kind == 'linux': devlib_target_cls = devlib.LinuxTarget conn_settings.update( username=resolved_username, port=port", "= workdir or '/data/local/tmp/devlib-target' if device: pass elif host: port", "{ name: val for name, val in global_vars.items() if name", "by the function') else: raise excep out_tempfiles = tuple() try:", "f.__name__ return (name, code_str) def execute_python(self, f, args, kwargs, **execute_kwargs):", "would shadow any global name # anyway, and it's restricted", "we don't want to let # sysmted-journald or systemd-timesyncd running.", "to freeze everything except PID 1, we don't want to", "callable we cannot control if hasattr(f, '__wrapped__'): raise 
TypeError('Decorated functions", "for param, value in vars(args).items() if param in custom_params }", "isinstance(mod, ModuleType) } def can_include(f): return ( isinstance(f, FunctionType) and", ".. code-block:: python TargetConf({{ 'name': 'myboard', 'host': 192.0.2.1, 'kind': 'linux',", "except TargetStableError: return False else: return True def is_module_available(self, module):", "Disallow decorated functions since their definition depends on # external", "read_output(val) # If the file is empty, we probably got", "plat_info=None, lazy_platinfo=False, workdir=None, device=None, host=None, port=None, username=None, password=<PASSWORD>, keyfile=None, strict_host_check=None,", "result directory: {res_dir}') # It will fail if the folder", "'wb') as f: f.write(out) ''').format( f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n' + '", "from remote functions') closure_vars = { name: val for var_dct", "f\"--kernel {system['kernel']}\", f\"--dtb {system['dtb']}\", f\"--disk-image {system['disk']}\" )) diod_path = which('diod')", "HideExekallID, ExekallTaggable, Configurable): \"\"\" Wrap :class:`devlib.target.Target` to provide additional features", "do very # platform-specific things we are not interested in", "host machine, check your installation or your PATH variable') #", "globals so that it can # access them. It's harmless", "of the target.\") parser.add_argument(\"--username\", \"-u\", help=\"Login username. 
Only applies to", "pylint: disable=dangerous-default-value super().__init__() logger = self.get_logger() self.name = name res_dir", "AttributeError: logger.warning('Could not disable idle states, cpuidle devlib module is", "pickle.loads({globals})) except BaseException as e: out = e out_is_excep =", "law or agreed to in writing, software # distributed under", "simulator_args) super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs ) # vim :set tabstop=4", "freeze the process in charge of de-freezing, otherwise we #", "self._installed_tools = set() self.target = self._init_target( kind=kind, name=name, workdir=workdir, device=device,", "except FileExistsError: # If the time is used in the", "the target configuration so it becomes # available for later", "applies to Linux kind.\") parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login password. Only applies", "help=\"Verbosity level of the logs.\") parser.add_argument(\"--res-dir\", \"-o\", help=\"Result directory of", "set(tools) - self._installed_tools # TODO: compute the checksum of the", "wait_boot_timeout=10, ): # pylint: disable=dangerous-default-value super().__init__() logger = self.get_logger() self.name", "\"\"\".format( script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\", '-c', help=\"Path to a TargetConf and", "# not prevent from connecting to the target. if kind", "are ignored and not applied. \"\"\" sig = inspect.signature(f) kwargs", "script = textwrap.dedent(''' import pickle import sys def wrapper(): {modules}", "try: out = eval(wrapper.__code__, pickle.loads({globals})) except BaseException as e: out", "EOFError: # pylint: disable=raise-missing-from try: excep = read_output(excep) # If", "after the Target is # initialized. Expensive computations are deferred", "\"\"\" logger = self.get_logger() if not self.is_rooted: logger.warning('Could not freeze", "str .. 
note:: This will attempt to load the module", "should be used instead # of the original one to", "name: mod for name, mod in global_vars.items() if isinstance(mod, ModuleType)", "spawn the Python interpreter on the target \"\"\" def wrapper_param(f):", "command line arguments. :param argv: The list of arguments. ``sys.argv[1:]``", "of tools to install on the target', [TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily", "# Do not freeze the process in charge of de-freezing,", ":param plat_info: Platform information attached to this target, for the", "the target') cm = nullcontext else: exclude = copy.copy(self.CRITICAL_TASKS[self.target.os]) #", "devlib_platform_conf = conf['devlib']['platform'] devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {}))", "already, and bail out if it fails to load. \"\"\"", "not in target_conf: parser.error('--conf with target configuration or any of", "@contextlib.contextmanager def cm(): logger.info(f\"Freezing all tasks except: {','.join(exclude)}\") try: yield", "closure_vars = { name: val for var_dct in inspect.getclosurevars(f) if", "'.join(map(shlex.quote, cmd)) try: self.execute(cmd, **execute_kwargs) except Exception as e: #", "dir be created automatically virtio_args = [ f'--which-diod={diod_path}', '--workload-automation-vio={}', ]", "self.get_logger() while True: time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not name: name", "to Android kind.\") device_group.add_argument(\"--host\", \"-n\", help=\"The hostname/IP of the target.\")", ":class:`devlib.target.Target` :type devlib_platform: devlib.platform.Platform :param plat_info: Platform information attached to", "That board name is mostly free form # and no", "module: target is using systemd, which already uses cgroups') devlib_excluded_modules.add('cgroups')", "userspace. .. 
note:: A number of situations prevent from freezing", "def can_include(f): return ( isinstance(f, FunctionType) and # Only allow", "Limited and contributors. # # Licensed under the Apache License,", "which would normally be invalid, but is handled as a", "with HiKey960, the board will crash if this is frozen", "parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\", help=\"The ADB ID of the target. Superseeds", "may obtain a copy of the License at # #", "name = os.path.join(d, 'out') self.pull(path, name) with open(name, 'rb') as", "None else use_scp, ) # Configure password or SSH keyfile", "try: excep = read_output(excep) # If we can't even read", "so that: # 1. there is no name clash risk", "Only applies to Linux kind.\") parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login password. Only", "cannot be called from remote functions') closure_vars = { name:", "_make_remote_snippet(cls, name, code_str, module, kwargs, global_vars, out_tempfiles): # Inject the", "'freezer' not in controllers: logger.warning('Could not freeze userspace: freezer cgroup", "the script if it is defined in the same module", "@classmethod def from_conf(cls, conf: TargetConf, res_dir: ArtifactPath = None, plat_info:", "keyword arguments. The return value or any exception is pickled", "devlib_platform, wait_boot, wait_boot_timeout, ): \"\"\" Initialize the Target \"\"\" logger", "file devlib_platform_conf = conf['devlib']['platform'] devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args',", "be created by calling :class:`~TargetConf` with a dictionary. The top-level", "Target connection settings. Only keys defined below are allowed, with", "can be used in the configuration file: .. code-block:: YAML", "# password. 
conn_settings.update( unrooted=password is None, password=password, ) else: raise", "they are either undefined or just were used to #", "module) except Exception: # pylint: disable=broad-except return False else: return", "'password' ) logger.debug(f'{kind} {name} target connection settings:\\n {settings}') ######################################################################## #", "None, password=password, ) else: raise ValueError(f'Unsupported platform type {kind}') settings", "mod in global_vars.items() if isinstance(mod, ModuleType) } def can_include(f): return", "the module on demand if attr in self._devlib_loadable_modules: self.get_logger().info(f'Loading target", "be expected for a given kind of board # (i.e.", "names that we can't afford to freeze when using :meth:`freeze_userspace`.", "abritrary code execution. \"\"\" conf = TargetConf.from_yaml_map(path) try: plat_info =", "tasks') self.cgroups.freeze(thaw=True) with cm() as x: yield x @contextlib.contextmanager def", "any external entity, which means the results will be lost", "the function to us lines = [ line for line", "devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, ): # pylint: disable=dangerous-default-value super().__init__()", "of situations prevent from freezing anything. When that happens, a", "systemd is being used, according to: # https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d", "for Gem5 devlib Platform, that requires a \"host_output_dir\" # argument", "specific value should be expected for a given kind of", "needed. rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True)", "we can't even read the exception, raise the initial one", "note:: Only load trusted YAML files as it can lead", "this file except in compliance with the License. 
# You", "sure all submodules of devlib.module are imported so the classes", "file transfer method. Currently avaliable options # are 'sftp' and", "def __init__(self, system, simulator, **kwargs): simulator_args = copy.copy(simulator.get('args', [])) system_platform", ") target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) # None as username means adb root", "computed when actually needed. rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self,", "contextlib.suppress(FileNotFoundError): os.remove(res_lnk) # There may be a race condition with", "[int, None]), KeyDesc('device', 'ADB device. Takes precedence over \"host\"', [str,", "devlib_target_cls( platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings, working_directory=workdir, connect=False, ) target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) #", "**f_kwargs): return self.execute_python(f, f_args, f_kwargs, **kwargs) return wrapper return wrapper_param", "the necessary connection information: $ {script} --conf my_target.yml Alternatively, --kind", "your host machine, check your installation or your PATH variable')", "of the results directory :type name: str :param append_time: If", "Only applies to Linux kind.\") parser.add_argument(\"--log-level\", default='info', choices=('warning', 'info', 'debug'),", "# # Licensed under the Apache License, Version 2.0 (the", "global_vars.items(): if can_include(f): add_func(f, f_name) code_str += '\\n' + '\\n'.join(map(itemgetter(1),", "configuration # to not use any python-specific YAML tags, so", "username=resolved_username, port=port or self.SSH_PORT_DEFAULT, host=host, strict_host_check=True if strict_host_check is None", "} class TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\" Target connection settings. Only keys", "optional if the relevant features aren't needed. .. 
note:: The", "be relied upon to implement long-term scripts, it's more designed", "will be the current datetime. :type append_time: bool :param symlink:", "'Hostname or IP address of the host', [str, None]), KeyDesc('username',", "Load the TargetConf from the file, and update it with", "that will spawn the Python interpreter on the target \"\"\"", "lets you disable all idle states \"\"\" logger = self.get_logger()", "a Juno board might be named \"foo-bar-juno-on-my-desk\") if name: self.plat_info.add_src('target-conf',", "Initialize the Target \"\"\" logger = self.get_logger() conn_settings = {}", "{key}: {val}' for key, val in conn_settings.items() if key !=", "can_include(f): return ( isinstance(f, FunctionType) and # Only allow inlining", "parse_output(paths, err): val, excep = paths try: return read_output(val) #", "any of the connection options is required') if args.kind ==", "# \"!env:<type> ENV_VAR_NAME\" can be used to reference an #", "pass else: target_conf.add_src(args.conf, conf) target_conf.add_src('command-line', { k: v for k,", "or '')}\") return target def get_res_dir(self, name=None, append_time=True, symlink=True): \"\"\"", "is not needed here: .. code-block:: python TargetConf({{ 'name': 'myboard',", "= ' '.join(shlex.quote(a) for a in simulator_args) super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'],", "get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized", "which already uses cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules", "in TargetConf must be listed here not_target_conf_opt = { 'platform_info',", "return read_output(val) # If the file is empty, we probably", ":class:`Target` from the YAML configuration file pointed by ``LISA_CONF`` environment", "password. 
conn_settings.update( unrooted=password is None, password=password, ) else: raise ValueError(f'Unsupported", "= None return parse_output(out_tempfiles, err) finally: for path in out_tempfiles:", "ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable from lisa.generic", "the connection', [bool]), LevelKeyDesc('wait-boot', 'Wait for the target to finish", "# available for later inspection. That board name is mostly", "use to build the :class:`devlib.target.Target` :type devlib_platform: devlib.platform.Platform :param plat_info:", "Options that are not a key in TargetConf must be", ") logger.debug(f'{kind} {name} target connection settings:\\n {settings}') ######################################################################## # Devlib", "except PID 1, we don't want to let # sysmted-journald", "modules: modules = f\"import {', '.join(sorted(modules))}\" else: modules = ''", "information: $ {script} --conf my_target.yml Alternatively, --kind must be set", "the logs.\") parser.add_argument(\"--res-dir\", \"-o\", help=\"Result directory of the created Target.", "# external callable we cannot control if hasattr(f, '__wrapped__'): raise", "KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None]), KeyDesc('workdir',", "directory name will be the current datetime. :type append_time: bool", "TargetConf files can # be parsed and produced by any", "name: val for name, val in global_vars.items() if name not", "f and can_include(_f): add_func(_f, _name) modules.update( (name, mod) for name,", "def execute_python(self, f, args, kwargs, **execute_kwargs): \"\"\" Executes the given", "cm = nullcontext else: controllers = [s.name for s in", "there is no name clash risk # 2. 
we don't", "are sure to be correct for tool in map(bin_path, tools):", "= system['platform'] # Get gem5 binary arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args',", "'early' ) } class TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\" Target connection settings.", "(adb)', [str]), KeyDesc('host', 'Hostname or IP address of the host',", "a devlib module') try: getattr(self, module) except Exception: # pylint:", "import ModuleType, FunctionType from operator import itemgetter import devlib from", "in target_conf): parser.error('--host or --device must be specified') if args.kind", "that root so it can be relocated as the caller", "when accessed. \"\"\" def get(): return getattr(self.target, attr) try: return", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "ignored and not applied. \"\"\" sig = inspect.signature(f) kwargs =", "be: * bundled in the script if it is defined", "res_lnk) return res_dir def install_tools(self, tools): \"\"\" Install tools additional", "checksum of the tool + install location and keep #", "name not in non_pickled } if modules: modules = f\"import", "address of the host', [str, None]), KeyDesc('username', 'SSH username. 
On", "ModuleType) } def can_include(f): return ( isinstance(f, FunctionType) and #", ") self._res_dir = res_dir os.makedirs(self._res_dir, exist_ok=True) if os.listdir(self._res_dir): raise ValueError(f'res_dir", "failed: {e}') logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup() logger.info(f\"Connected to", "Alternatively, --kind must be set along the relevant credentials: $", "= copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do not freeze the process in charge", "a # number of problems that could appear after another", "(mktemp(), mktemp()) snippet = self._make_remote_snippet( name=name, code_str=code_str, module=f.__module__, kwargs=kwargs, global_vars={", "devlib modules that could be loaded on-demand. \"\"\" attrs =", "))) parser.add_argument(\"--conf\", '-c', help=\"Path to a TargetConf and PlatformInfo yaml", "note:: That will not forward special methods like __str__, since", "for name, mod in closure_vars.items() if isinstance(mod, ModuleType) ) funcs", "or implied. # See the License for the specific language", "from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable from lisa.generic import", "expected for a given kind of board # (i.e. 
a", "}) # Some sanity check to get better error messages", "ValueError(f'res_dir must be empty: {self._res_dir}') if plat_info is None: plat_info", "# pylint: disable=broad-except return False else: return True def __getattr__(self,", "resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {})) # Hack for Gem5 devlib", "def from_cli(cls, argv=None, params=None) -> 'Target': \"\"\" Same as :meth:`from_custom_cli`", "There may be a race condition with another tool trying", "initial one # from devlib except EOFError: raise err if", "from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name,", "in and getattr(cls, 'stage') != 'early' ) } class TargetConf(SimpleMultiSrcConf,", "the directory name will be the current datetime. :type append_time:", "depends on # external callable we cannot control if hasattr(f,", "self.ADB_PORT_DEFAULT device = f'{host}:{port}' else: device = 'DEFAULT' conn_settings['device'] =", "lead to abritrary code execution. \"\"\".format( script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\", '-c',", "set(devlib_excluded_modules) # Sorry, can't let you do that. 
Messing with", "we are not interested in and getattr(cls, 'stage') != 'early'", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "= ArtifactPath(root, os.path.join(relative, name)) # Compute base installation path logger.info(f'Creating", "import sys def wrapper(): {modules} {code} return {f}({kwargs}) try: out", "If the username was explicitly set to \"root\", root the", "it apparently # cannot be frozen, so the cgroup state", "logger.warning(f'\"adb root\" failed: {e}') logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup()", "TypeError('Decorated functions cannot be called from remote functions') closure_vars =", "execute remotely using :meth:`execute_python`:: target = Target(...) @target.remote_func(timeout=42) def foo(x,", "\"\"\" if module not in _DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\" is not", "wait_boot=True, wait_boot_timeout=10, ): # pylint: disable=dangerous-default-value super().__init__() logger = self.get_logger()", "= set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules return sorted(attrs) @classmethod def", "the host', [str, None]), KeyDesc('username', 'SSH username. On ADB connections,", "lines = [ line for line in lines if not", "= TargetConf INIT_KWARGS_KEY_MAP = { 'devlib_excluded_modules': ['devlib', 'excluded-modules'], 'devlib_file_xfer': ['devlib',", "configuration file. This file will be used to provide a", "from_custom_cli(cls, argv=None, params=None): \"\"\" Create a Target from command line", "tool trying to create # the link with contextlib.suppress(FileExistsError): os.symlink(res_dir,", "by calling :class:`~TargetConf` with a dictionary. 
The top-level `target-conf` key", "of target to connect to.\") device_group = parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\",", "to build the Platform object', [Mapping]), )), KeyDesc('excluded-modules', 'List of", "name=None, append_time=True, symlink=True): \"\"\" Returns a directory managed by LISA", "import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules,", "be relocated as the caller wants it res_dir = ArtifactPath(root,", "PasswordKeyDesc('password', 'SSH password', [str, None]), KeyDesc('port', 'SSH or ADB server", "install on the target', [TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform", "] } \"\"\" Dictionary mapping OS name to list of", "name=name, code_str=code_str, module=f.__module__, kwargs=kwargs, global_vars={ **closure_vars.globals, **closure_vars.nonlocals, }, out_tempfiles=out_tempfiles )", "be used in the configuration file: .. code-block:: YAML target-conf:", "a systemd # system is pretty bad idea. if self._uses_systemd:", "and produced by any other third-party code LevelKeyDesc('platform', 'devlib.platform.Platform subclass", "idle states for all domains') for cpu in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu)", "{shlex.quote(self.working_directory)}' ).strip() def read_output(path): with tempfile.TemporaryDirectory() as d: name =", "workdir=workdir, device=device, host=host, port=port, username=username, password=password, keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform,", "the tool + install location and keep # that in", "check_exit_code=True) except TargetStableError: return False else: return True def is_module_available(self,", "the TargetConf from the file, and update it with command", "s in self.cgroups.list_subsystems()] if 'freezer' not in controllers: logger.warning('Could not", "is # initialized. 
Expensive computations are deferred so they will", "!= 'password' ) logger.debug(f'{kind} {name} target connection settings:\\n {settings}') ########################################################################", "for the benefits of user code. :type plat_info: lisa.platforms.platinfo.PlatformInfo You", "but is handled as a special case:: target-conf: target-conf: name:", "python-specific YAML tags, so TargetConf files can # be parsed", "= { cls.name for cls in get_subclasses(devlib.module.Module) if ( getattr(cls,", "file: .. code-block:: YAML target-conf: # \"!env:<type> ENV_VAR_NAME\" can be", "Google Pixel it apparently # cannot be frozen, so the", "# computed when actually needed. rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir)", "$ {script} --conf my_target.yml Alternatively, --kind must be set along", "None else ValueError('No exception was raised or value returned by", "Can be \"sftp\" (default) or \"scp\". (Only valid for linux", "hasattr(f, '__wrapped__'): raise TypeError('Decorated functions cannot be called from remote", "\"\"\" ADB_PORT_DEFAULT = 5555 SSH_PORT_DEFAULT = 22 CRITICAL_TASKS = {", "devlib_platform_kwargs.setdefault('host_output_dir', res_dir) # Actually build the devlib Platform object devlib_platform", "= None) -> 'Target': cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs = cls.conf_to_init_kwargs(conf) kwargs['res_dir']", "kind=kind, name=name, workdir=workdir, device=device, host=host, port=port, username=username, password=password, keyfile=keyfile, strict_host_check=strict_host_check,", "while True: time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not name: name =", "Autodetect information from the target, after the Target is #", "symlink: res_lnk = os.path.join(LISA_HOME, LATEST_LINK) with contextlib.suppress(FileNotFoundError): os.remove(res_lnk) # There", "process in charge of de-freezing, otherwise we # will freeze", "interested in and getattr(cls, 
'stage') != 'early' ) } class", "file with contextlib.suppress(KeyError, ValueError): platform_info = PlatformInfo.from_yaml_map(args.conf) # Load the", "'ADB device. Takes precedence over \"host\"', [str, None]), KeyDesc('keyfile', 'SSH", "\"\"\" Initialize the Target \"\"\" logger = self.get_logger() conn_settings =", "that are referred to will be: * bundled in the", "'Wait for the target to finish booting', ( KeyDesc('enable', 'Enable", "--conf can point to a YAML target configuration file with", "on that as the created folder will not be tracked", "--conf can also contain a PlatformInfo YAML description. Note: only", "except AttributeError: # Load the module on demand if attr", "not in not_target_conf_opt }) # Some sanity check to get", "LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized from lisa.assets", "check', [bool]), KeyDesc('timeout', 'Timeout of the boot check', [int]), )),", "abritrary code execution. \"\"\" path = os.environ['LISA_CONF'] return cls.from_one_conf(path) @classmethod", "if it fails to load. \"\"\" if module not in", "no exception is raised, so it's a best-effort approach. \"\"\"", "code execution. \"\"\".format( script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\", '-c', help=\"Path to a", "in the test config 'tools' field :param tools: The list", "Load the module on demand if attr in self._devlib_loadable_modules: self.get_logger().info(f'Loading", "condition with another tool trying to create # the link", "be # computed when actually needed. 
rta_calib_res_dir = ArtifactPath.join(self._res_dir, 'rta_calib')", "YAML target-conf: # \"!env:<type> ENV_VAR_NAME\" can be used to reference", "' '.join(shlex.quote(a) for a in simulator_args) super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs", "EOFError: raise err if err is not None else ValueError('No", "a password, assume we can use it as a sudo", "'\\n' + ' ' * 4), modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=',", "= [ f'--which-diod={diod_path}', '--workload-automation-vio={}', ] simulator_args.extend(virtio_args) # Quote/escape arguments and", "# initialized. Expensive computations are deferred so they will only", "control f.__module__ == module ) def add_func(f, name): # Disallow", "password=password, ) else: raise ValueError(f'Unsupported platform type {kind}') settings =", "None if args.conf: # Tentatively load a PlatformInfo from the", "out_tempfiles): # Inject the parameters inside the wrapper's globals so", "conf: TargetConf, res_dir: ArtifactPath = None, plat_info: PlatformInfo = None)", "device = f'{host}:{port}' else: device = 'DEFAULT' conn_settings['device'] = device", "relative = self._res_dir.relative else: root = self._res_dir relative = ''", "{tools}') self.install_tools(tools) # Autodetect information from the target, after the", "A connected :class:`Target` \"\"\" _, target = cls.from_custom_cli(argv=argv, params=params) return", "file bar.yml:: target-conf: !include foo.yml This will result in that", "raise the initial one # from devlib except EOFError: raise", "if keyfile: conn_settings['keyfile'] = keyfile else: conn_settings['password'] = password elif", "'-c', help=\"Path to a TargetConf and PlatformInfo yaml file. 
Other", "load_default_modules=False, connection_settings=conn_settings, working_directory=workdir, connect=False, ) target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) # None as", "= self.get_logger() while True: time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not name:", "can # be parsed and produced by any other third-party", "setup_logging(level=args.log_level.upper()) target_conf = TargetConf() platform_info = None if args.conf: #", "def add_func(f, name): # Disallow decorated functions since their definition", "files can # be parsed and produced by any other", "happens, a warning is logged but no exception is raised,", "a module that is installed on the target and that", "was raised or value returned by the function') else: raise", "function to execute remotely using :meth:`execute_python`:: target = Target(...) @target.remote_func(timeout=42)", "code execution. \"\"\" path = os.environ['LISA_CONF'] return cls.from_one_conf(path) @classmethod def", "\"scp\". (Only valid for linux targets)', [TypedList[str]]), )) )) DEFAULT_SRC", "= os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool) return binary tools = set(tools)", ":param symlink: Create a symlink named ``results_latest`` to the newly", "a :class:`Target` from the YAML configuration file pointed by ``LISA_CONF``", "= res_dir os.makedirs(self._res_dir, exist_ok=True) if os.listdir(self._res_dir): raise ValueError(f'res_dir must be", "= '' script = textwrap.dedent(''' import pickle import sys def", "--password <PASSWORD> In both cases, --conf can also contain a", "will not forward special methods like __str__, since the interpreter", "them. 
It's harmless as they would shadow any global name", "the devlib Platform object devlib_platform = devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] = devlib_platform", "simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', [])) simulator_args.extend(( f\"--kernel {system['kernel']}\", f\"--dtb {system['dtb']}\", f\"--disk-image {system['disk']}\"", "not loading it') # Something else that does not exist", "def from_one_conf(cls, path): \"\"\" Create a :class:`Target` from a single", "* referred to by name, assuming it comes from a", "to the newly created results directory :type symlink: bool \"\"\"", "description=textwrap.dedent( \"\"\" Connect to a target using the provided configuration", "target connection settings:\\n {settings}') ######################################################################## # Devlib Platform configuration ########################################################################", "The list of arguments. ``sys.argv[1:]`` will be used if this", "PlatformInfo from the conf file with contextlib.suppress(KeyError, ValueError): platform_info =", "use it as a sudo # password. conn_settings.update( unrooted=password is", "= paths try: return read_output(val) # If the file is", "by name, assuming it comes from a module that is", "v for k, v in vars(args).items() if v is not", "module is not loaded') cm = nullcontext else: @contextlib.contextmanager def", "res_dir kwargs['plat_info'] = plat_info # Create a devlib Platform instance", "device # If the username was explicitly set to \"root\",", "next time it will succeed if append_time: logger.info('Directory already exists,", "$LISA_HOME will be used.\") params = params or {} for", "using :meth:`freeze_userspace`. \"\"\" CONF_CLASS = TargetConf INIT_KWARGS_KEY_MAP = { 'devlib_excluded_modules':", "environment variable. name: !env:str BOARD_NAME port: !env:int PORT .. note::", "argument computed at runtime. 
# Note: lisa.target.Gem5SimulationPlatformWrapper should be used", "An instance can be created by calling :class:`~TargetConf` with a", "Note: only load trusted YAML files as it can lead", "help=\"Login username. Only applies to Linux kind.\") parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login", "read_output(excep) # If we can't even read the exception, raise", "'sftp' and 'scp', defaults to sftp. if devlib_file_xfer and devlib_file_xfer", "connection options is required') if args.kind == 'android': if ('host'", "\"\"\" def wrapper_param(f): @functools.wraps(f) def wrapper(*f_args, **f_kwargs): return self.execute_python(f, f_args,", "in _DEVLIB_AVAILABLE_MODULES: # pylint: disable=raise-missing-from raise AttributeError(f'Devlib target module {attr}", "and ('device' not in target_conf): parser.error('--host or --device must be", "permissions and # limitations under the License. # from datetime", "AttributeError(f'Devlib target module {attr} was explicitly excluded, not loading it')", "+ '\\n'.join(map(itemgetter(1), funcs.values())) non_pickled = set(modules.keys()) | set(funcs.keys()) global_vars =", "with another tool trying to create # the link with", "it can # access them. It's harmless as they would", "root adb upon target connection', [str, None]), PasswordKeyDesc('password', 'SSH password',", "from mapping configuration if issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir) # Actually", "ADB connections, \"root\" username will root adb upon target connection',", "disable=raise-missing-from try: excep = read_output(excep) # If we can't even", "platform type {kind}') settings = '\\n '.join( f' {key}: {val}'", "\"host\"], help=\"The kind of target to connect to.\") device_group =", "as possible conn_settings['adb_as_root'] = (username == 'root') elif kind ==", "that we can't afford to freeze when using :meth:`freeze_userspace`. 
\"\"\"", "not None else ValueError('No exception was raised or value returned", "= copy.copy(devlib_platform_conf.get('args', {})) # Hack for Gem5 devlib Platform, that", "do that. Messing with cgroups in a systemd # system", "module {attr} was explicitly excluded, not loading it') # Something", "\"\"\" if isinstance(self._res_dir, ArtifactPath): root = self._res_dir.root relative = self._res_dir.relative", "be listed here not_target_conf_opt = { 'platform_info', 'log_level', 'res_dir', 'conf',", "# Setup virtio # Brackets are there to let the", "# Keep the signature without *args and **kwargs so that", "other third-party code LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', ( KeyDesc('class', 'Name", "another module # is updated or so. We only inline", "Pixel it apparently # cannot be frozen, so the cgroup", "``None``. :type argv: list(str) :param params: Dictionary of custom parameters", "yield x def get_tags(self): return {'board': self.name} @classmethod def _make_remote_snippet(cls,", "'password': '<PASSWORD>', }}) Or alternatively, from a YAML configuration file:", "in _DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\" is not a devlib module') try:", "a PlatformInfo from the conf file with contextlib.suppress(KeyError, ValueError): platform_info", "to sftp. if devlib_file_xfer and devlib_file_xfer not in ('scp', 'sftp'):", "The wrapping of :class:`devlib.target.Target` is done using composition, as opposed", "= { 'devlib_excluded_modules': ['devlib', 'excluded-modules'], 'devlib_file_xfer': ['devlib', 'file-xfer'], 'wait_boot': ['wait-boot',", "\"-k\", choices=[\"android\", \"linux\", \"host\"], help=\"The kind of target to connect", "KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [TypedList[str]]), KeyDesc('file-xfer',", ".. 
note:: A number of situations prevent from freezing anything.", "symlink named ``results_latest`` to the newly created results directory :type", "{self._res_dir}') if plat_info is None: plat_info = PlatformInfo() else: #", "name res_dir = res_dir if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR),", "= inspect.getclosurevars(f) name, code_str = self._get_code(f) def mktemp(): return self.execute(", "myboard * file bar.yml:: target-conf: !include foo.yml This will result", "be empty: {self._res_dir}') if plat_info is None: plat_info = PlatformInfo()", "type {kind}') settings = '\\n '.join( f' {key}: {val}' for", "in writing, software # distributed under the License is distributed", "not in target_conf): parser.error('--host or --device must be specified') if", "# number of problems that could appear after another module", "os.symlink(res_dir, res_lnk) return res_dir def install_tools(self, tools): \"\"\" Install tools", "to those specified in the test config 'tools' field :param", "structure in a YAML file is allowed and will work:", "is used in the name, there is some hope that", "not prevent from connecting to the target. 
if kind ==", "= 'DEFAULT' conn_settings['device'] = device # If the username was", "all domains') for cpu in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with cm() as", "# platform-specific things we are not interested in and getattr(cls,", "attached to this target, for the benefits of user code.", "attr): \"\"\" Forward all non-overriden attributes/method accesses to the underlying", "symlink: bool \"\"\" if isinstance(self._res_dir, ArtifactPath): root = self._res_dir.root relative", "disable=dangerous-default-value super().__init__() logger = self.get_logger() self.name = name res_dir =", "additional to those specified in the test config 'tools' field", "tools): self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager def freeze_userspace(self): \"\"\" Context manager that", "reflected in the caller's context. Also, functions that are referred", "Check if the given devlib module is available. :returns: ``True``", "@classmethod def from_one_conf(cls, path): \"\"\" Create a :class:`Target` from a", "be reflected in the caller's context. Also, functions that are", "work: * file foo.yml:: target-conf: name: myboard * file bar.yml::", "{res_dir}') # It will fail if the folder already exists.", "username. 
On ADB connections, \"root\" username will root adb upon", "strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, ): # pylint: disable=dangerous-default-value", "target.adb_root(enable=True) except Exception as e: # pylint: disable=broad-except logger.warning(f'\"adb root\"", "params=params) return target @classmethod def from_custom_cli(cls, argv=None, params=None): \"\"\" Create", "upon to implement long-term scripts, it's more designed for quick", "cm() as x: yield x @contextlib.contextmanager def disable_idle_states(self): \"\"\" Context", "line in lines if not line.strip().startswith('@') ] code_str = textwrap.dedent(''.join(lines))", "Usage of that function is reserved to interactive use or", "\"-u\", help=\"Login username. Only applies to Linux kind.\") parser.add_argument(\"--password\", <PASSWORD>\",", "device_group.add_argument(\"--device\", \"-d\", help=\"The ADB ID of the target. Superseeds --host.", "local things that are in # direct control f.__module__ ==", "# If we can't even read the exception, raise the", "to the parser. It is in the form of ``{param_name:", "load trusted YAML files as it can lead to abritrary", "'.join( f'{name}={name}' for name in kwargs.keys() ) ) return script", "ExekallTaggable, Configurable): \"\"\" Wrap :class:`devlib.target.Target` to provide additional features on", "return sorted(attrs) @classmethod def from_conf(cls, conf: TargetConf, res_dir: ArtifactPath =", "target, for the benefits of user code. :type plat_info: lisa.platforms.platinfo.PlatformInfo", "available for later inspection. That board name is mostly free", "that lets you freeze the userspace. .. 
note:: A number", "# Note: lisa.target.Gem5SimulationPlatformWrapper should be used instead # of the", "# Remove decorators, as they are either undefined or just", "variables are supported, but mutating them will not be reflected", "workdir = workdir or '/data/local/tmp/devlib-target' if device: pass elif host:", "or self.SSH_PORT_DEFAULT, host=host, strict_host_check=True if strict_host_check is None else strict_host_check,", "check your installation or your PATH variable') # Setup virtio", "Exception: # pylint: disable=broad-except return False else: return True def", "f_args, f_kwargs, **kwargs) return wrapper return wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def", "( isinstance(f, FunctionType) and # Only allow inlining of functions", "name): # Disallow decorated functions since their definition depends on", "_f is not f and can_include(_f): add_func(_f, _name) modules.update( (name,", "platform information:\\n{plat_info}') self.plat_info = plat_info # Take the board name", "cpu in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield finally: logger.info('Re-enabling idle states for", "Take the board name from the target configuration so it", "the License for the specific language governing permissions and #", "lead to abritrary code execution. \"\"\" conf = TargetConf.from_yaml_map(path) try:", "out_is_excep = False out = pickle.dumps(out) out_tempfile = {out_tempfiles}[1] if", ".. note:: That structure in a YAML file is allowed", "code_str) def execute_python(self, f, args, kwargs, **execute_kwargs): \"\"\" Executes the", "None) -> 'Target': cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs = cls.conf_to_init_kwargs(conf) kwargs['res_dir'] =", "if this is frozen # for too long. 
'watchdogd', ]", "and can_include(_f): add_func(_f, _name) modules.update( (name, mod) for name, mod", "v in vars(args).items() if v is not None and k", "if hasattr(f, '__wrapped__'): raise TypeError('Decorated functions cannot be called from", "either \"password\" or \"keyfile\". All other fields are optional if", "must be set along the relevant credentials: $ {script} --kind", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "_name, _f in closure_vars.items(): if _f is not f and", "given Python function ``f`` with the provided positional and keyword", "cm = nullcontext elif not self.is_module_available('cgroups'): logger.warning('Could not freeze userspace:", "argv=None, params=None): \"\"\" Create a Target from command line arguments.", "returned by the function') else: raise excep out_tempfiles = tuple()", "LevelKeyDesc('wait-boot', 'Wait for the target to finish booting', ( KeyDesc('enable',", "ModuleType, FunctionType from operator import itemgetter import devlib from devlib.exception", "kwargs, global_vars, out_tempfiles): # Inject the parameters inside the wrapper's", "devlib modules to *not* load', [TypedList[str]]), KeyDesc('file-xfer', 'File transfer method.", "= devlib_platform cls.check_init_param(**kwargs) return cls(**kwargs) @classmethod def from_default_conf(cls): \"\"\" Create", "= { **global_vars, **kwargs, } # Treat the modules separately", "**kwargs).arguments closure_vars = inspect.getclosurevars(f) name, code_str = self._get_code(f) def mktemp():", "= time_str elif append_time: name = f\"{name}-{time_str}\" # If we", "TargetConf.from_yaml_map('target_conf.yml') The following special YAML tags can be used in", "from operator import itemgetter import devlib from devlib.exception import TargetStableError", "adb root will be attempted, but failure will # not", "Create devlib Target object ######################################################################## target = devlib_target_cls( 
platform=devlib_platform, load_default_modules=False,", "res_dir def install_tools(self, tools): \"\"\" Install tools additional to those", "= cls.from_custom_cli(argv=argv, params=params) return target @classmethod def from_custom_cli(cls, argv=None, params=None):", "userspace: freezer cgroup controller not available on the target') cm", "swapping the exact class used under the hood, and avoids", "means \"host\", \"username\" and either \"password\" or \"keyfile\". All other", "= (username == 'root') elif kind == 'linux': devlib_target_cls =", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "parser.add_argument(\"--username\", \"-u\", help=\"Login username. Only applies to Linux kind.\") parser.add_argument(\"--password\",", "logger.info('Disabling idle states for all domains') try: cpuidle = self.cpuidle", "in non_pickled } if modules: modules = f\"import {', '.join(sorted(modules))}\"", "__init__(self, system, simulator, **kwargs): simulator_args = copy.copy(simulator.get('args', [])) system_platform =", "early modules try to connect to UART and do very", "Dictionary mapping OS name to list of task names that", "features on top of it. {configurable_params} :param devlib_platform: Instance of", "OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target workdir', [str]), KeyDesc('tools', 'List", "been excluded explicitly elif attr in _DEVLIB_AVAILABLE_MODULES: # pylint: disable=raise-missing-from", "kind == 'linux': devlib_target_cls = devlib.LinuxTarget conn_settings.update( username=resolved_username, port=port or", "is not loaded') cm = nullcontext else: @contextlib.contextmanager def cm():", "[str, None]), KeyDesc('username', 'SSH username. 
On ADB connections, \"root\" username", "# pylint: disable=raise-missing-from try: excep = read_output(excep) # If we", "name: val for var_dct in inspect.getclosurevars(f) if isinstance(var_dct, Mapping) for", "( KeyDesc('enable', 'Enable the boot check', [bool]), KeyDesc('timeout', 'Timeout of", "are supported, but mutating them will not be reflected in", "map(bin_path, tools): self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager def freeze_userspace(self): \"\"\" Context manager", "be a race condition with another tool trying to create", "if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True", "out = pickle.dumps(out) out_tempfile = {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0]", "tempfile from types import ModuleType, FunctionType from operator import itemgetter", "special YAML tags can be used in the configuration file:", "strict_host_check=True if strict_host_check is None else strict_host_check, use_scp=False if use_scp", "modules separately as they cannot be pickled modules = {", ":language: YAML :: TargetConf.from_yaml_map('target_conf.yml') The following special YAML tags can", "closure_vars = inspect.getclosurevars(f) name, code_str = self._get_code(f) def mktemp(): return", "arguments and build the command line gem5_args = ' '.join(shlex.quote(a)", "binary tools = set(tools) - self._installed_tools # TODO: compute the", "tags can be used in the configuration file: .. code-block::", "module that is installed on the target and that this", "= eval(wrapper.__code__, pickle.loads({globals})) except BaseException as e: out = e", "TargetConf(SimpleMultiSrcConf, HideExekallID): \"\"\" Target connection settings. 
Only keys defined below", "**closure_vars.nonlocals, }, out_tempfiles=out_tempfiles ) cmd = ['python3', '-c', snippet] cmd", "# append_time should be used to ensure we get a", "self.cgroups.list_subsystems()] if 'freezer' not in controllers: logger.warning('Could not freeze userspace:", "try: for cpu in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield finally: logger.info('Re-enabling idle", "by LISA to store results. Usage of that function is", "globals=repr(pickle.dumps(global_vars)), kwargs=', '.join( f'{name}={name}' for name in kwargs.keys() ) )", "name. try: os.makedirs(res_dir) break except FileExistsError: # If the time", "device. Takes precedence over \"host\"', [str, None]), KeyDesc('keyfile', 'SSH private", "of names of tools to install :type tools: list(str) \"\"\"", "will be: * bundled in the script if it is", "{name} target connection settings:\\n {settings}') ######################################################################## # Devlib Platform configuration", "} class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): \"\"\" Wrap :class:`devlib.target.Target` to", "Some sanity check to get better error messages if 'kind'", "1. there is no name clash risk # 2. we", "PlatformInfo.from_yaml_map(path) except Exception as e: # pylint: disable=broad-except cls.get_logger().warning(f'No platform", "in charge of de-freezing, otherwise we # will freeze to", "# distributed under the License is distributed on an \"AS", "is mostly free form # and no specific value should", "connect to UART and do very # platform-specific things we", "relative = '' return self._get_res_dir( root=root, relative=relative, name=name, append_time=append_time, symlink=symlink,", "# Something else that does not exist ... else: raise", "the relevant features aren't needed. .. 
note:: The wrapping of", "contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk) return res_dir def install_tools(self, tools): \"\"\" Install", "# Unless required by applicable law or agreed to in", "device if kind == 'android': devlib_target_cls = devlib.AndroidTarget # Workaround", "{code} return {f}({kwargs}) try: out = eval(wrapper.__code__, pickle.loads({globals})) except BaseException", "res_dir=None, plat_info=None, lazy_platinfo=False, workdir=None, device=None, host=None, port=None, username=None, password=<PASSWORD>, keyfile=None,", "we can't afford to freeze when using :meth:`freeze_userspace`. \"\"\" CONF_CLASS", "given kind of board # (i.e. a Juno board might", "cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules # Initialize binary", "long-term scripts, it's more designed for quick scripting. \"\"\" parser", "= devlib.AndroidTarget # Workaround for ARM-software/devlib#225 workdir = workdir or", "TargetConf.from_yaml_map(path) try: plat_info = PlatformInfo.from_yaml_map(path) except Exception as e: #", "in _installed_tools, so we are sure to be correct for", "KeyDesc('port', 'SSH or ADB server port', [int, None]), KeyDesc('device', 'ADB", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "called from remote functions') closure_vars = { name: val for", "else strict_host_check, use_scp=False if use_scp is None else use_scp, )", "f\"import {', '.join(sorted(modules))}\" else: modules = '' script = textwrap.dedent('''", "loaded on-demand. \"\"\" attrs = set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules", "is updated or so. 
We only inline local things that", "= { name: mod for name, mod in global_vars.items() if", "exception was raised or value returned by the function') else:", "the output dir be created automatically virtio_args = [ f'--which-diod={diod_path}',", ") ) return script @staticmethod def _get_code(f): lines, _ =", "from_cli(cls, argv=None, params=None) -> 'Target': \"\"\" Same as :meth:`from_custom_cli` without", "connection settings', ( KeyDesc('name', 'Board name, free-form value only used", "connection', [bool]), LevelKeyDesc('wait-boot', 'Wait for the target to finish booting',", "be lost in some automated environment. :param name: Name of", "or \"keyfile\". All other fields are optional if the relevant", "PasswordKeyDesc(KeyDesc): def pretty_format(self, v): return '<password>' # Make sure all", "as it can lead to abritrary code execution. \"\"\".format( script=os.path.basename(sys.argv[0])", "``name``. If ``name`` is None, the directory name will be", "import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging,", "attempt to load the module if it's not loaded already,", "def __init__(self, kind, name='<noname>', tools=[], res_dir=None, plat_info=None, lazy_platinfo=False, workdir=None, device=None,", "[TypedList[str]]), KeyDesc('file-xfer', 'File transfer method. Can be \"sftp\" (default) or", "modules = '' script = textwrap.dedent(''' import pickle import sys", ":class:`devlib.target.Target` is done using composition, as opposed to inheritance. 
This", "tool) if not os.path.isfile(binary): binary = os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool)", "self.name} @classmethod def _make_remote_snippet(cls, name, code_str, module, kwargs, global_vars, out_tempfiles):", "instead # of the original one to benefit from mapping", "TargetConf({{ 'name': 'myboard', 'host': 192.0.2.1, 'kind': 'linux', 'username': 'foo', 'password':", "'class': 'devlib.platform.Platform' } } } class Target(Loggable, HideExekallID, ExekallTaggable, Configurable):", "in closure_vars.items() if isinstance(mod, ModuleType) ) funcs = {} for", "'Target': cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs = cls.conf_to_init_kwargs(conf) kwargs['res_dir'] = res_dir kwargs['plat_info']", "as a sudo # password. conn_settings.update( unrooted=password is None, password=password,", "k, v in vars(args).items() if v is not None and", "workdir=target.working_directory)}') target.setup() logger.info(f\"Connected to target {(name or '')}\") return target", "not be tracked by any external entity, which means the", "note:: The wrapping of :class:`devlib.target.Target` is done using composition, as", "or ADB server port', [int, None]), KeyDesc('device', 'ADB device. Takes", "Only load trusted YAML files as it can lead to", "as # early as possible conn_settings['adb_as_root'] = (username == 'root')", "SSH keyfile if keyfile: conn_settings['keyfile'] = keyfile else: conn_settings['password'] =", "name, there is some hope that the # next time", "done using composition, as opposed to inheritance. This allows swapping", "return cls.from_one_conf(path) @classmethod def from_one_conf(cls, path): \"\"\" Create a :class:`Target`", "TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board name, free-form value", "the boot check', [int]), )), LevelKeyDesc('devlib', 'devlib configuration', ( #", "target)`` .. 
note:: This method should not be relied upon", "get(): return getattr(self.target, attr) try: return get() except AttributeError: #", "target', [TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to speed", "os.remove(res_lnk) # There may be a race condition with another", "'\\n' + '\\n'.join(map(itemgetter(1), funcs.values())) non_pickled = set(modules.keys()) | set(funcs.keys()) global_vars", "def __getattr__(self, attr): \"\"\" Forward all non-overriden attributes/method accesses to", "{f}({kwargs}) try: out = eval(wrapper.__code__, pickle.loads({globals})) except BaseException as e:", "of ArgumentParser.add_argument() options}}``. :type params: dict(str, dict) :return: A tuple", "name, assuming it comes from a module that is installed", "demand if attr in self._devlib_loadable_modules: self.get_logger().info(f'Loading target devlib module {attr}')", "is logged but no exception is raised, so it's a", "def get_res_dir(self, name=None, append_time=True, symlink=True): \"\"\" Returns a directory managed", "parser. It is in the form of ``{param_name: {dict of", "to UART and do very # platform-specific things we are", "{', '.join(sorted(modules))}\" else: modules = '' script = textwrap.dedent(''' import", "that structure which would normally be invalid, but is handled", "build the :class:`devlib.target.Target` :type devlib_platform: devlib.platform.Platform :param plat_info: Platform information", "devlib.AndroidTarget # Workaround for ARM-software/devlib#225 workdir = workdir or '/data/local/tmp/devlib-target'", "'host': devlib_target_cls = devlib.LocalLinuxTarget # If we are given a", "in global_vars.items(): if can_include(f): add_func(f, f_name) code_str += '\\n' +", "dictionary. 
The top-level `target-conf` key is not needed here: ..", "boot check', [int]), )), LevelKeyDesc('devlib', 'devlib configuration', ( # Using", "required in ['host', 'username', 'password']: if required not in target_conf:", "# not use this file except in compliance with the", "@property @memoized def _uses_systemd(self): try: # Check if systemd is", "as they cannot be pickled modules = { name: mod", "e: # pylint: disable=broad-except err = e else: err =", "argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\" Connect to a target using the", "( KeyDesc('name', 'Board name, free-form value only used to embelish", "value or any exception is pickled back and is returned/raised", "if it is defined in the same module * referred", "key != 'password' ) logger.debug(f'{kind} {name} target connection settings:\\n {settings}')", "was not in the loadable list, it # has been", "are either undefined or just were used to # feed", ") def _get_res_dir(self, root, relative, name, append_time, symlink): logger =", "if modules: modules = f\"import {', '.join(sorted(modules))}\" else: modules =", "it's a best-effort approach. \"\"\" logger = self.get_logger() if not", "\"\"\" Create a :class:`Target` from a single YAML configuration file.", "path): \"\"\" Create a :class:`Target` from a single YAML configuration", "execution. .. note:: That structure in a YAML file is", "at runtime. # Note: lisa.target.Gem5SimulationPlatformWrapper should be used instead #", "them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES = { cls.name for cls in get_subclasses(devlib.module.Module)", "parser.add_argument(\"--log-level\", default='info', choices=('warning', 'info', 'debug'), help=\"Verbosity level of the logs.\")", "this is ``None``. :type argv: list(str) :param params: Dictionary of", "YAML target configuration file with all the necessary connection information:", "underlying :class:`devlib.target.Target`. .. 
note:: That will not forward special methods", "self.execute('test -d /run/systemd/system/', check_exit_code=True) except TargetStableError: return False else: return", "to \"root\", root the target as # early as possible", "in a systemd # system is pretty bad idea. if", "will be used if this is ``None``. :type argv: list(str)", "the interpreter bypasses __getattr__ when looking them up. .. note::", "self.cgroups.freeze(thaw=True) with cm() as x: yield x @contextlib.contextmanager def disable_idle_states(self):", "the target. For SSH targets that means \"host\", \"username\" and", "tool + install location and keep # that in _installed_tools,", "specified in the file.\" ) parser.add_argument(\"--kind\", \"-k\", choices=[\"android\", \"linux\", \"host\"],", "INIT_KWARGS_KEY_MAP = { 'devlib_excluded_modules': ['devlib', 'excluded-modules'], 'devlib_file_xfer': ['devlib', 'file-xfer'], 'wait_boot':", "os.environ['LISA_CONF'] return cls.from_one_conf(path) @classmethod def from_one_conf(cls, path): \"\"\" Create a", "board will crash if this is frozen # for too", "devlib Platform, that requires a \"host_output_dir\" # argument computed at", "results. 
Usage of that function is reserved to interactive use", "cgroups devlib module: target is using systemd, which already uses", "from lisa.assets import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc,", "will work: * file foo.yml:: target-conf: name: myboard * file", "# the link with contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk) return res_dir def", "def wrapper_param(f): @functools.wraps(f) def wrapper(*f_args, **f_kwargs): return self.execute_python(f, f_args, f_kwargs,", "_, target = cls.from_custom_cli(argv=argv, params=params) return target @classmethod def from_custom_cli(cls,", ":return: A connected :class:`Target` \"\"\" _, target = cls.from_custom_cli(argv=argv, params=params)", "file, and update it with command # line arguments try:", "import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable from", "underlying target, and the devlib modules that could be loaded", "Sorry, can't let you do that. Messing with cgroups in", "= self._init_target( kind=kind, name=name, workdir=workdir, device=device, host=host, port=port, username=username, password=password,", "from the YAML configuration file pointed by ``LISA_CONF`` environment variable.", "diod_path is None: raise RuntimeError('Failed to find \"diod\" on your", "['devlib', 'file-xfer'], 'wait_boot': ['wait-boot', 'enable'], 'wait_boot_timeout': ['wait-boot', 'timeout'], } def", "\"foo-bar-juno-on-my-desk\") if name: self.plat_info.add_src('target-conf', dict(name=name)) # Determine file transfer method.", "under the License is distributed on an \"AS IS\" BASIS,", "k: v for k, v in vars(args).items() if v is", "given meaning and type: {generated_help} An instance can be created", "kind of board # (i.e. 
a Juno board might be", "tools: The list of names of tools to install :type", "undefined or just were used to # feed the function", "up the connection', [bool]), LevelKeyDesc('wait-boot', 'Wait for the target to", "target transparently val = foo(1, y=2) :Variable keyword arguments: Forwarded", "for all domains') try: cpuidle = self.cpuidle except AttributeError: logger.warning('Could", "as e: out = e out_is_excep = True else: out_is_excep", "raised. .. attention:: Decorators are ignored and not applied. \"\"\"", "location under $LISA_HOME will be used.\") params = params or", "for cls in get_subclasses(devlib.module.Module) if ( getattr(cls, 'name', None) #", "mktemp()) snippet = self._make_remote_snippet( name=name, code_str=code_str, module=f.__module__, kwargs=kwargs, global_vars={ **closure_vars.globals,", "cls.get_logger().warning(f'No platform information could be found: {e}') plat_info = None", "of ``{param_name: {dict of ArgumentParser.add_argument() options}}``. :type params: dict(str, dict)", "from devlib except EOFError: raise err if err is not", "'kind' not in target_conf: parser.error('--conf with target configuration or any", "tracked by any external entity, which means the results will", "exists. In that case, # append_time should be used to", "if attr in self._devlib_loadable_modules: self.get_logger().info(f'Loading target devlib module {attr}') self.target.install_module(attr)", "parser.add_argument(\"--conf\", '-c', help=\"Path to a TargetConf and PlatformInfo yaml file.", "to store results. Usage of that function is reserved to", "is None: try: target.adb_root(enable=True) except Exception as e: # pylint:", ":class:`devlib.target.Target`. .. note:: That will not forward special methods like", "@functools.wraps(f) def wrapper(*f_args, **f_kwargs): return self.execute_python(f, f_args, f_kwargs, **kwargs) return", "= e out_is_excep = True else: out_is_excep = False out", ":meth:`from_custom_cli` without the custom parameters capabilities. 
:return: A connected :class:`Target`", "Target object ######################################################################## target = devlib_target_cls( platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings, working_directory=workdir,", "return True def is_module_available(self, module): \"\"\" Check if the given", "up with ``devlib`` internal members. \"\"\" ADB_PORT_DEFAULT = 5555 SSH_PORT_DEFAULT", "get a unique name. try: os.makedirs(res_dir) break except FileExistsError: #", "target connection', [str, None]), PasswordKeyDesc('password', 'SSH password', [str, None]), KeyDesc('port',", "time is used in the name, there is some hope", "\"\"\" sig = inspect.signature(f) kwargs = sig.bind(*args, **kwargs).arguments closure_vars =", "This will attempt to load the module if it's not", "we are given a password, assume we can use it", "it comes from a module that is installed on the", "_uses_systemd(self): try: # Check if systemd is being used, according", "(name, code_str) def execute_python(self, f, args, kwargs, **execute_kwargs): \"\"\" Executes", "to death and a machine hard reboot will be required", "--username root --password <PASSWORD> In both cases, --conf can also", "devlib Platform object devlib_platform = devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] = devlib_platform cls.check_init_param(**kwargs)", "a devlib Platform instance out of the configuration file devlib_platform_conf", "not interested in and getattr(cls, 'stage') != 'early' ) }", "deploy if tools: logger.info(f'Tools to install: {tools}') self.install_tools(tools) # Autodetect", "any global name # anyway, and it's restricted to the", "conf = TargetConf.from_yaml_map(args.conf) except (KeyError, ValueError): pass else: target_conf.add_src(args.conf, conf)", "'devlib configuration', ( # Using textual name of the Platform", "the Target \"\"\" logger = self.get_logger() conn_settings = {} resolved_username", "# from devlib except EOFError: 
raise err if err is", "afford to freeze when using :meth:`freeze_userspace`. \"\"\" CONF_CLASS = TargetConf", "!env:str BOARD_NAME port: !env:int PORT .. note:: Only load trusted", "**execute_kwargs) except Exception as e: # pylint: disable=broad-except err =", "target is not rooted') cm = nullcontext elif not self.is_module_available('cgroups'):", "{out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0] with open(out_tempfile, 'wb') as f:", "'SSH username. On ADB connections, \"root\" username will root adb", "or just were used to # feed the function to", "configuration file: Content of target_conf.yml: .. literalinclude:: ../target_conf.yml :language: YAML", "same module so that: # 1. there is no name", "= [ line for line in lines if not line.strip().startswith('@')", "got an exception except EOFError: # pylint: disable=raise-missing-from try: excep", "to let # sysmted-journald or systemd-timesyncd running. 'systemd[^-]', 'dbus', 'sh',", "__str__, since the interpreter bypasses __getattr__ when looking them up.", "BaseException as e: out = e out_is_excep = True else:", "value in vars(args).items() if param in custom_params } custom_args =", "``False`` otherwise. :param module: Devlib module to check. :type module:", "If we are given a password, assume we can use", "using eval() global_vars = { **global_vars, **kwargs, } # Treat", "self.pull(path, name) with open(name, 'rb') as f: return pickle.loads(f.read()) def", "[str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool,", "# Sorry, can't let you do that. Messing with cgroups", "host=None, port=None, username=None, password=<PASSWORD>, keyfile=None, strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True,", "folder will not be tracked by any external entity, which", "target @classmethod def from_custom_cli(cls, argv=None, params=None): \"\"\" Create a Target", "that. 
Messing with cgroups in a systemd # system is", "name, append_time, symlink): logger = self.get_logger() while True: time_str =", "on-demand. \"\"\" attrs = set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules return", "False out = pickle.dumps(out) out_tempfile = {out_tempfiles}[1] if out_is_excep else", "without *args and **kwargs so that it's usable by exekall", "in get_subclasses(devlib.module.Module) if ( getattr(cls, 'name', None) # early modules", "'sh', 'adbd', 'usb', 'transport', # We don't actually need this", "Create a devlib Platform instance out of the configuration file", "freeze userspace: \"cgroups\" devlib module is necessary') cm = nullcontext", "return x + y # Execute the function on the", "of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target workdir', [str]), KeyDesc('tools',", "as the caller wants it res_dir = ArtifactPath(root, os.path.join(relative, name))", "in inspect.getclosurevars(f) if isinstance(var_dct, Mapping) for name, val in var_dct.items()", "may be a race condition with another tool trying to", "f'{host}:{port}' else: device = 'DEFAULT' conn_settings['device'] = device # If", "so they will only be # computed when actually needed.", "to.\") device_group = parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\", help=\"The ADB ID of", "explicitly excluded, not loading it') # Something else that does", "module is necessary') cm = nullcontext else: controllers = [s.name", "name = f.__name__ return (name, code_str) def execute_python(self, f, args,", "inline local things that are in # direct control f.__module__", "Platform object devlib_platform = devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] = devlib_platform cls.check_init_param(**kwargs) return", "to abritrary code execution. 
\"\"\" path = os.environ['LISA_CONF'] return cls.from_one_conf(path)", "relied upon to implement long-term scripts, it's more designed for", "as e: # pylint: disable=broad-except logger.warning(f'\"adb root\" failed: {e}') logger.debug(f'Target", "append_time should be used to ensure we get a unique", "no specific value should be expected for a given kind", "from_default_conf(cls): \"\"\" Create a :class:`Target` from the YAML configuration file", "lead to abritrary code execution. \"\"\" path = os.environ['LISA_CONF'] return", "ANY KIND, either express or implied. # See the License", "TargetConf must be listed here not_target_conf_opt = { 'platform_info', 'log_level',", "\"\"\" Returns a directory managed by LISA to store results.", "'binaries', 'scripts', tool) return binary tools = set(tools) - self._installed_tools", "root, we # preserve that root so it can be", "the License. # You may obtain a copy of the", "the connection options is required') if args.kind == 'android': if", "and contributors. # # Licensed under the Apache License, Version", "logger.info(f'User-defined platform information:\\n{plat_info}') self.plat_info = plat_info # Take the board", "# See the License for the specific language governing permissions", "configuration if issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir) # Actually build the", "relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True ) self._res_dir = res_dir os.makedirs(self._res_dir, exist_ok=True)", "so that it can # access them. It's harmless as", "is raised, so it's a best-effort approach. 
\"\"\" logger =", "so TargetConf files can # be parsed and produced by", "# If the target is Android, we need just (eventually)", "Gem5 devlib Platform, that requires a \"host_output_dir\" # argument computed", "} # Treat the modules separately as they cannot be", "mod for name, mod in global_vars.items() if isinstance(mod, ModuleType) }", "things that are in # direct control f.__module__ == module", "\"\"\" def get(): return getattr(self.target, attr) try: return get() except", "devlib_platform: Instance of :class:`devlib.platform.Platform` to use to build the :class:`devlib.target.Target`", "plat_info=platform_info, res_dir=args.res_dir) def _init_target(self, kind, name, workdir, device, host, port,", "out_tempfiles = (mktemp(), mktemp()) snippet = self._make_remote_snippet( name=name, code_str=code_str, module=f.__module__,", "strict_host_check is None else strict_host_check, use_scp=False if use_scp is None", "# 2. we don't inline the whole world, which could", "# If the time is used in the name, there", "mapping OS name to list of task names that we", "\"cgroups\" devlib module is necessary') cm = nullcontext else: controllers", "members. \"\"\" ADB_PORT_DEFAULT = 5555 SSH_PORT_DEFAULT = 22 CRITICAL_TASKS =", "\"\"\" attrs = set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules return sorted(attrs)", "configuration:\\n{conf}') kwargs = cls.conf_to_init_kwargs(conf) kwargs['res_dir'] = res_dir kwargs['plat_info'] = plat_info", "target is Android, we need just (eventually) the device if", "cases, --conf can also contain a PlatformInfo YAML description. 
Note:", "True, the current datetime will be appended to the given", "'_') for k in params.keys()} # Options that are not", "datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not name: name = time_str elif append_time: name", "freezer cgroup controller not available on the target') cm =", "if name not in non_pickled } if modules: modules =", "the signature without *args and **kwargs so that it's usable", "tool) return binary tools = set(tools) - self._installed_tools # TODO:", "it becomes # available for later inspection. That board name", "configuration in order to run a test. EXAMPLES --conf can", "cls in get_subclasses(devlib.module.Module) if ( getattr(cls, 'name', None) # early", "is in scope. If that is not the case, a", "the hood, and avoids messing up with ``devlib`` internal members.", "if diod_path is None: raise RuntimeError('Failed to find \"diod\" on", "set(funcs.keys()) global_vars = { name: val for name, val in", "ArtifactPath with an existing root, we # preserve that root", "Instance of :class:`devlib.platform.Platform` to use to build the :class:`devlib.target.Target` :type", "the target, after the Target is # initialized. Expensive computations", "but on Google Pixel it apparently # cannot be frozen,", "more designed for quick scripting. \"\"\" parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter,", "lines if not line.strip().startswith('@') ] code_str = textwrap.dedent(''.join(lines)) name =", "username or 'root' logger.debug(f'Setting up {kind} target...') # If the", "mostly free form # and no specific value should be", "aren't needed. .. note:: The wrapping of :class:`devlib.target.Target` is done", "logger.info('Directory already exists, retrying ...') continue else: raise if symlink:", "and avoids messing up with ``devlib`` internal members. \"\"\" ADB_PORT_DEFAULT", "password. Only applies to Linux kind.\") parser.add_argument(\"--log-level\", default='info', choices=('warning', 'info',", "of arguments. 
``sys.argv[1:]`` will be used if this is ``None``.", "is pickled back and is returned/raised in the host caller.", "arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', [])) simulator_args.extend(( f\"--kernel {system['kernel']}\", f\"--dtb {system['dtb']}\",", "idea. if self._uses_systemd: logger.warning('Will not load cgroups devlib module: target", "parameters to add to the parser. It is in the", "userspace tasks') self.cgroups.freeze(thaw=True) with cm() as x: yield x @contextlib.contextmanager", "# system is pretty bad idea. if self._uses_systemd: logger.warning('Will not", "list(str) \"\"\" def bin_path(tool): binary = os.path.join(ASSETS_PATH, 'binaries', self.abi, tool)", "res_dir if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True,", "non-overriden attributes/method accesses to the underlying :class:`devlib.target.Target`. .. note:: That", "# Check if systemd is being used, according to: #", "to embelish logs', [str]), KeyDesc('kind', 'Target kind. Can be \"linux\"", "For SSH targets that means \"host\", \"username\" and either \"password\"", "attr in self._devlib_loadable_modules: self.get_logger().info(f'Loading target devlib module {attr}') self.target.install_module(attr) return", "tool in map(bin_path, tools): self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager def freeze_userspace(self): \"\"\"", "writing, software # distributed under the License is distributed on", "set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules return sorted(attrs) @classmethod def from_conf(cls,", "the cgroup state gets stuck in FREEZING if we #", "is None, the directory name will be the current datetime.", "to the given ``name``. 
If ``name`` is None, the directory", "Create a symlink named ``results_latest`` to the newly created results", "time it will succeed if append_time: logger.info('Directory already exists, retrying", "not disable idle states, cpuidle devlib module is not loaded')", ":type name: str :param append_time: If True, the current datetime", "[str, None]), PasswordKeyDesc('password', 'SSH password', [str, None]), KeyDesc('port', 'SSH or", "*not* load', [TypedList[str]]), KeyDesc('file-xfer', 'File transfer method. Can be \"sftp\"", "type: {generated_help} An instance can be created by calling :class:`~TargetConf`", "+ y # Execute the function on the target transparently", "Platform object', [Mapping]), )), KeyDesc('excluded-modules', 'List of devlib modules to", "cpuidle = self.cpuidle except AttributeError: logger.warning('Could not disable idle states,", "are there to let the output dir be created automatically", "as e: # pylint: disable=broad-except err = e else: err", "symlink=True): \"\"\" Returns a directory managed by LISA to store", "# Actually build the devlib Platform object devlib_platform = devlib_platform_cls(**devlib_platform_kwargs)", "compute the checksum of the tool + install location and", "raise excep out_tempfiles = tuple() try: out_tempfiles = (mktemp(), mktemp())", "in target_conf: parser.error(f'--{required} must be specified') custom_args = { param:", "Exception as e: # pylint: disable=broad-except logger.warning(f'\"adb root\" failed: {e}')", "# Quote/escape arguments and build the command line gem5_args =", "out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=', '.join( f'{name}={name}' for name in kwargs.keys() )", "except Exception as e: # pylint: disable=broad-except cls.get_logger().warning(f'No platform information", "using composition, as opposed to inheritance. 
This allows swapping the", "{kind}') settings = '\\n '.join( f' {key}: {val}' for key,", "with an existing root, we # preserve that root so", "choices=[\"android\", \"linux\", \"host\"], help=\"The kind of target to connect to.\")", "'devlib': { 'platform': { 'class': 'devlib.platform.Platform' } } } class", "for cpu in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield finally: logger.info('Re-enabling idle states", "target...') # If the target is Android, we need just", "parse_output(out_tempfiles, err) finally: for path in out_tempfiles: self.remove(path) def remote_func(self,", "def pretty_format(self, v): return '<password>' # Make sure all submodules", "cannot be frozen, so the cgroup state gets stuck in", "= devlib.LocalLinuxTarget # If we are given a password, assume", "if ('host' not in target_conf) and ('device' not in target_conf):", "root will be attempted, but failure will # not prevent", "self.target = self._init_target( kind=kind, name=name, workdir=workdir, device=device, host=host, port=port, username=username,", "the target transparently val = foo(1, y=2) :Variable keyword arguments:", "\"\"\" List our attributes plus the ones from the underlying", "Initialize binary tools to deploy if tools: logger.info(f'Tools to install:", "script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\", '-c', help=\"Path to a TargetConf and PlatformInfo", "a unique name. try: os.makedirs(res_dir) break except FileExistsError: # If", "not f and can_include(_f): add_func(_f, _name) modules.update( (name, mod) for", "devlib_target_cls = devlib.LinuxTarget conn_settings.update( username=resolved_username, port=port or self.SSH_PORT_DEFAULT, host=host, strict_host_check=True", "be invalid, but is handled as a special case:: target-conf:", ".. 
literalinclude:: ../target_conf.yml :language: YAML :: TargetConf.from_yaml_map('target_conf.yml') The following special", "password=<PASSWORD>, keyfile=None, strict_host_check=None, devlib_platform=None, devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, ): #", "can use it as a sudo # password. conn_settings.update( unrooted=password", "prevent from connecting to the target. if kind == 'android'", "} if modules: modules = f\"import {', '.join(sorted(modules))}\" else: modules", "Similar issue with HiKey960, the board will crash if this", "KeyDesc('class', 'Name of the class to use', [str]), KeyDesc('args', 'Keyword", "best-effort approach. \"\"\" logger = self.get_logger() if not self.is_rooted: logger.warning('Could", "{'board': self.name} @classmethod def _make_remote_snippet(cls, name, code_str, module, kwargs, global_vars,", "of the boot check', [int]), )), LevelKeyDesc('devlib', 'devlib configuration', (", "target-conf: !include foo.yml This will result in that structure which", "file is empty, we probably got an exception except EOFError:", "range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with cm() as x: yield x def get_tags(self):", "'root') elif kind == 'linux': devlib_target_cls = devlib.LinuxTarget conn_settings.update( username=resolved_username,", "the Apache License, Version 2.0 (the \"License\"); you may #", "devlib Platform instance out of the configuration file devlib_platform_conf =", "with contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk) return res_dir def install_tools(self, tools): \"\"\"", "# None as username means adb root will be attempted,", "@classmethod def from_custom_cli(cls, argv=None, params=None): \"\"\" Create a Target from", "contain a PlatformInfo YAML description. 
Note: only load trusted YAML", "True: time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not name: name = time_str", "self.name = name res_dir = res_dir if res_dir else self._get_res_dir(", "connection_settings=conn_settings, working_directory=workdir, connect=False, ) target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) # None as username", "by any external entity, which means the results will be", "anything. When that happens, a warning is logged but no", "not freeze userspace: \"cgroups\" devlib module is necessary') cm =", "sftp. if devlib_file_xfer and devlib_file_xfer not in ('scp', 'sftp'): raise", "use_scp=False if use_scp is None else use_scp, ) # Configure", "out = e out_is_excep = True else: out_is_excep = False", "\"diod\" on your host machine, check your installation or your", "relevant credentials: $ {script} --kind linux --host 192.0.2.1 --username root", "import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES = { cls.name for cls in get_subclasses(devlib.module.Module) if", "the benefits of user code. :type plat_info: lisa.platforms.platinfo.PlatformInfo You need", "host caller. :Variable keyword arguments: Forwarded to :meth:`execute` that will", "parser.add_argument(f'--{param}', **settings) custom_params = {k.replace('-', '_') for k in params.keys()}", "global_vars={ **closure_vars.globals, **closure_vars.nonlocals, }, out_tempfiles=out_tempfiles ) cmd = ['python3', '-c',", "can be created by calling :class:`~TargetConf` with a dictionary. The", "valid for linux targets)', [TypedList[str]]), )) )) DEFAULT_SRC = {", "f'{name}={name}' for name in kwargs.keys() ) ) return script @staticmethod", "self.is_rooted: logger.warning('Could not freeze userspace: target is not rooted') cm", "'Timeout of the boot check', [int]), )), LevelKeyDesc('devlib', 'devlib configuration',", "pretty bad idea. 
if self._uses_systemd: logger.warning('Will not load cgroups devlib", "devlib_excluded_modules=[], devlib_file_xfer=None, wait_boot=True, wait_boot_timeout=10, ): # pylint: disable=dangerous-default-value super().__init__() logger", "rooted') cm = nullcontext elif not self.is_module_available('cgroups'): logger.warning('Could not freeze", "os.listdir(self._res_dir): raise ValueError(f'res_dir must be empty: {self._res_dir}') if plat_info is", "TODO: compute the checksum of the tool + install location", "# direct control f.__module__ == module ) def add_func(f, name):", "is reserved to interactive use or simple scripts. Tests should", "in map(bin_path, tools): self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager def freeze_userspace(self): \"\"\" Context", "that is not the case, a :exc:`NameError` will be raised.", "functions') closure_vars = { name: val for var_dct in inspect.getclosurevars(f)", "target_conf) and ('device' not in target_conf): parser.error('--host or --device must", "opposed to inheritance. This allows swapping the exact class used", "assume we can use it as a sudo # password.", "params: dict(str, dict) :return: A tuple ``(args, target)`` .. note::", "snippet] cmd = ' '.join(map(shlex.quote, cmd)) try: self.execute(cmd, **execute_kwargs) except", "\"-d\", help=\"The ADB ID of the target. Superseeds --host. Only", "except Exception: # pylint: disable=broad-except return False else: return True", "it can lead to abritrary code execution. .. note:: That", "target = devlib_target_cls( platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings, working_directory=workdir, connect=False, ) target.connect(check_boot_completed=wait_boot,", "# to not use any python-specific YAML tags, so TargetConf", "True def is_module_available(self, module): \"\"\" Check if the given devlib", "the results will be lost in some automated environment. 
:param", "Treat the modules separately as they cannot be pickled modules", "Only allow inlining of functions defined in the same module", "load the module if it's not loaded already, and bail", "it was not in the loadable list, it # has", "self._get_res_dir( root=root, relative=relative, name=name, append_time=append_time, symlink=symlink, ) def _get_res_dir(self, root,", "devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] = devlib_platform cls.check_init_param(**kwargs) return cls(**kwargs) @classmethod def from_default_conf(cls):", "['devlib', 'excluded-modules'], 'devlib_file_xfer': ['devlib', 'file-xfer'], 'wait_boot': ['wait-boot', 'enable'], 'wait_boot_timeout': ['wait-boot',", "to it plat_info = copy.copy(plat_info) logger.info(f'User-defined platform information:\\n{plat_info}') self.plat_info =", "# will freeze to death and a machine hard reboot", "None]), KeyDesc('keyfile', 'SSH private key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent", "in ('scp', 'sftp'): raise ValueError(f'Invalid file transfer method: {devlib_file_xfer}') use_scp", "class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): \"\"\" Wrap :class:`devlib.target.Target` to provide", "= params or {} for param, settings in params.items(): parser.add_argument(f'--{param}',", "code_str=code_str, module=f.__module__, kwargs=kwargs, global_vars={ **closure_vars.globals, **closure_vars.nonlocals, }, out_tempfiles=out_tempfiles ) cmd", "already uses cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules #", ".. note:: The wrapping of :class:`devlib.target.Target` is done using composition,", "YAML files as it can lead to abritrary code execution.", "as :meth:`from_custom_cli` without the custom parameters capabilities. 
:return: A connected", "Apache-2.0 # # Copyright (C) 2018, ARM Limited and contributors.", "target def get_res_dir(self, name=None, append_time=True, symlink=True): \"\"\" Returns a directory", "as the created folder will not be tracked by any", "target_conf.yml: .. literalinclude:: ../target_conf.yml :language: YAML :: TargetConf.from_yaml_map('target_conf.yml') The following", "the underlying :class:`devlib.target.Target`. .. note:: That will not forward special", "= set() self.target = self._init_target( kind=kind, name=name, workdir=workdir, device=device, host=host,", "args = parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf = TargetConf() platform_info = None", "use_scp, ) # Configure password or SSH keyfile if keyfile:", "# Configure password or SSH keyfile if keyfile: conn_settings['keyfile'] =", "plat_info = PlatformInfo() else: # Make a copy of the", "kind.\") parser.add_argument(\"--log-level\", default='info', choices=('warning', 'info', 'debug'), help=\"Verbosity level of the", "is using systemd, which already uses cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules =", "parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login password. Only applies to Linux kind.\") parser.add_argument(\"--log-level\",", "not loaded') cm = nullcontext else: @contextlib.contextmanager def cm(): try:", "module ) def add_func(f, name): # Disallow decorated functions since", "the information needed to connect to the target. 
For SSH", "virtio_args = [ f'--which-diod={diod_path}', '--workload-automation-vio={}', ] simulator_args.extend(virtio_args) # Quote/escape arguments", "for ARM-software/devlib#225 workdir = workdir or '/data/local/tmp/devlib-target' if device: pass", "as f: f.write(out) ''').format( f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n' + ' '", "modify the original # one we were passed when adding", "devlib.module are imported so the classes # are all created", "'Remote target workdir', [str]), KeyDesc('tools', 'List of tools to install", "module: Devlib module to check. :type module: str .. note::", "vars(args).items() if param in custom_params } custom_args = argparse.Namespace(**custom_args) return", "break except FileExistsError: # If the time is used in", "it's restricted to the wrapper using eval() global_vars = {", "modules that could be loaded on-demand. \"\"\" attrs = set(super().__dir__())", "cannot control if hasattr(f, '__wrapped__'): raise TypeError('Decorated functions cannot be", "allowed, with the given meaning and type: {generated_help} An instance", "script if it is defined in the same module *", "err = e else: err = None return parse_output(out_tempfiles, err)", "= username or 'root' logger.debug(f'Setting up {kind} target...') # If", "\"\"\" Forward all non-overriden attributes/method accesses to the underlying :class:`devlib.target.Target`.", "types import ModuleType, FunctionType from operator import itemgetter import devlib", "attr in _DEVLIB_AVAILABLE_MODULES: # pylint: disable=raise-missing-from raise AttributeError(f'Devlib target module", "Create a :class:`Target` from a single YAML configuration file. 
This", "controller not available on the target') cm = nullcontext else:", "name, code_str, module, kwargs, global_vars, out_tempfiles): # Inject the parameters", "Tentatively load a PlatformInfo from the conf file with contextlib.suppress(KeyError,", "'systemd[^-]', 'dbus', 'sh', 'ssh', 'rsyslogd', 'jbd2' ], 'android': [ 'sh',", "This method should not be relied upon to implement long-term", "In that case, # append_time should be used to ensure", "args.conf: # Tentatively load a PlatformInfo from the conf file", "else: target_conf.add_src(args.conf, conf) target_conf.add_src('command-line', { k: v for k, v", "of functions defined in the same module so that: #", "[ f'--which-diod={diod_path}', '--workload-automation-vio={}', ] simulator_args.extend(virtio_args) # Quote/escape arguments and build", "args, kwargs, **execute_kwargs): \"\"\" Executes the given Python function ``f``", "could be loaded on-demand. \"\"\" attrs = set(super().__dir__()) | set(dir(self.target))", "being used, according to: # https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d /run/systemd/system/', check_exit_code=True)", "import devlib from devlib.exception import TargetStableError from devlib.utils.misc import which", "with a dictionary. The top-level `target-conf` key is not needed", "file foo.yml:: target-conf: name: myboard * file bar.yml:: target-conf: !include", "initialized. Expensive computations are deferred so they will only be", "used in the configuration file: .. code-block:: YAML target-conf: #", "in ['host', 'username', 'password']: if required not in target_conf: parser.error(f'--{required}", "devlib_platform: devlib.platform.Platform :param plat_info: Platform information attached to this target,", "not in _DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\" is not a devlib module')", "the host caller. 
:Variable keyword arguments: Forwarded to :meth:`execute` that", "out_tempfiles: self.remove(path) def remote_func(self, **kwargs): \"\"\" Decorates a given function", "wrapper_param(f): @functools.wraps(f) def wrapper(*f_args, **f_kwargs): return self.execute_python(f, f_args, f_kwargs, **kwargs)", ".. attention:: Decorators are ignored and not applied. \"\"\" sig", "try to connect to UART and do very # platform-specific", "KeyDesc('args', 'Keyword arguments to build the Platform object', [Mapping]), )),", "list(str) :param params: Dictionary of custom parameters to add to", "given ``name``. If ``name`` is None, the directory name will", "name)) # Compute base installation path logger.info(f'Creating result directory: {res_dir}')", "def disable_idle_states(self): \"\"\" Context manager that lets you disable all", "raise TypeError('Decorated functions cannot be called from remote functions') closure_vars", "'/data/local/tmp/devlib-target' if device: pass elif host: port = port or", "contextlib import shlex from collections.abc import Mapping import copy import", "needed here: .. code-block:: python TargetConf({{ 'name': 'myboard', 'host': 192.0.2.1,", "booting', ( KeyDesc('enable', 'Enable the boot check', [bool]), KeyDesc('timeout', 'Timeout", "when adding the target source to it plat_info = copy.copy(plat_info)", "devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses,", "= plat_info # Take the board name from the target", "the given meaning and type: {generated_help} An instance can be", "directory is specified, a default location under $LISA_HOME will be", "``f`` with the provided positional and keyword arguments. The return", "= e else: err = None return parse_output(out_tempfiles, err) finally:", "to abritrary code execution. .. 
note:: That structure in a", "import TargetStableError from devlib.utils.misc import which from devlib.platform.gem5 import Gem5SimulationPlatform", "an # environment variable. name: !env:str BOARD_NAME port: !env:int PORT", "timeout=wait_boot_timeout) # None as username means adb root will be", ":meth:`execute_python`:: target = Target(...) @target.remote_func(timeout=42) def foo(x, y): return x", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "username means adb root will be attempted, but failure will", "== 'android': if ('host' not in target_conf) and ('device' not", "parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf = TargetConf() platform_info = None if args.conf:", "and update it with command # line arguments try: conf", "logger = self.get_logger() logger.info('Disabling idle states for all domains') try:", "{attr}') self.target.install_module(attr) return get() # If it was not in", "on the target .. note:: Closure variables are supported, but", "are optional if the relevant features aren't needed. .. note::", "provide a :class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo` instances. .. note:: Only load", "it with command # line arguments try: conf = TargetConf.from_yaml_map(args.conf)", "configuration ######################################################################## if not devlib_platform: devlib_platform = devlib.platform.Platform() ######################################################################## #", "you freeze the userspace. .. 
note:: A number of situations", "the caller wants it res_dir = ArtifactPath(root, os.path.join(relative, name)) #", "('device' not in target_conf): parser.error('--host or --device must be specified')", "param: value for param, value in vars(args).items() if param in", "Using textual name of the Platform allows this YAML configuration", "function') else: raise excep out_tempfiles = tuple() try: out_tempfiles =", "myboard \"\"\" STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name',", "f: return pickle.loads(f.read()) def parse_output(paths, err): val, excep = paths", "instance can be created by calling :class:`~TargetConf` with a dictionary.", "access them. It's harmless as they would shadow any global", "allows swapping the exact class used under the hood, and", "'sftp'): raise ValueError(f'Invalid file transfer method: {devlib_file_xfer}') use_scp = devlib_file_xfer", "\"\"\" Context manager that lets you freeze the userspace. ..", "information:\\n{self.plat_info}') @property @memoized def _uses_systemd(self): try: # Check if systemd", "credentials: $ {script} --kind linux --host 192.0.2.1 --username root --password", "test config 'tools' field :param tools: The list of names", "'file-xfer'], 'wait_boot': ['wait-boot', 'enable'], 'wait_boot_timeout': ['wait-boot', 'timeout'], } def __init__(self,", ":type symlink: bool \"\"\" if isinstance(self._res_dir, ArtifactPath): root = self._res_dir.root", "be raised. .. attention:: Decorators are ignored and not applied.", "the provided positional and keyword arguments. The return value or", "not exist ... else: raise def __dir__(self): \"\"\" List our", "to freeze it. 'thermal-engine', # Similar issue with HiKey960, the", "our attributes plus the ones from the underlying target, and", "= TargetConf.from_yaml_map(path) try: plat_info = PlatformInfo.from_yaml_map(path) except Exception as e:", "kwargs.keys() ) ) return script @staticmethod def _get_code(f): lines, _", "scope. 
If that is not the case, a :exc:`NameError` will", "res_dir: ArtifactPath = None, plat_info: PlatformInfo = None) -> 'Target':", "params=None) -> 'Target': \"\"\" Same as :meth:`from_custom_cli` without the custom", "server port', [int, None]), KeyDesc('device', 'ADB device. Takes precedence over", "freeze userspace: freezer cgroup controller not available on the target')", "be required if isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager def cm(): logger.info(f\"Freezing", "val in global_vars.items() if name not in non_pickled } if", "cls._get_code(f)[1]) for _name, _f in closure_vars.items(): if _f is not", "was explicitly excluded, not loading it') # Something else that", "def from_conf(cls, conf: TargetConf, res_dir: ArtifactPath = None, plat_info: PlatformInfo", "mod in closure_vars.items() if isinstance(mod, ModuleType) ) funcs = {}", "foo(1, y=2) :Variable keyword arguments: Forwarded to :meth:`execute` that will", "StrictHostKeyChecking option of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote target workdir',", "check', [int]), )), LevelKeyDesc('devlib', 'devlib configuration', ( # Using textual", "{ 'devlib': { 'platform': { 'class': 'devlib.platform.Platform' } } }", "idle states \"\"\" logger = self.get_logger() logger.info('Disabling idle states for", "Target from command line arguments. :param argv: The list of", "fallback=True) logger.info(f'Effective platform information:\\n{self.plat_info}') @property @memoized def _uses_systemd(self): try: #", "\"\"\" Same as :meth:`from_custom_cli` without the custom parameters capabilities. 
:return:", "attrs = set(super().__dir__()) | set(dir(self.target)) | self._devlib_loadable_modules return sorted(attrs) @classmethod", "and **kwargs so that it's usable by exekall def from_cli(cls,", "import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable, memoized from", "TargetConf and PlatformInfo yaml file. Other options will override what", "clash risk # 2. we don't inline the whole world,", "os.makedirs(self._res_dir, exist_ok=True) if os.listdir(self._res_dir): raise ValueError(f'res_dir must be empty: {self._res_dir}')", "want to freeze everything except PID 1, we don't want", "the exception, raise the initial one # from devlib except", "try: getattr(self, module) except Exception: # pylint: disable=broad-except return False", "# If it was not in the loadable list, it", "- self._installed_tools # TODO: compute the checksum of the tool", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "on the target transparently val = foo(1, y=2) :Variable keyword", "= f'{host}:{port}' else: device = 'DEFAULT' conn_settings['device'] = device #", "except BaseException as e: out = e out_is_excep = True", "If we can't even read the exception, raise the initial", "['wait-boot', 'timeout'], } def __init__(self, kind, name='<noname>', tools=[], res_dir=None, plat_info=None,", "adding the target source to it plat_info = copy.copy(plat_info) logger.info(f'User-defined", "help=\"Login password. 
Only applies to Linux kind.\") parser.add_argument(\"--log-level\", default='info', choices=('warning',", "if 'kind' not in target_conf: parser.error('--conf with target configuration or", "anyway, and it's restricted to the wrapper using eval() global_vars", "note:: Closure variables are supported, but mutating them will not", "only inline local things that are in # direct control", "target workdir', [str]), KeyDesc('tools', 'List of tools to install on", "from datetime import datetime import os import os.path import contextlib", "cgroup state gets stuck in FREEZING if we # try", "of custom parameters to add to the parser. It is", "import datetime import os import os.path import contextlib import shlex", "YAML file is allowed and will work: * file foo.yml::", "elif host: port = port or self.ADB_PORT_DEFAULT device = f'{host}:{port}'", "{val}' for key, val in conn_settings.items() if key != 'password'", "textwrap.dedent(''' import pickle import sys def wrapper(): {modules} {code} return", "# If the file is empty, we probably got an", "a default location under $LISA_HOME will be used.\") params =", "``(args, target)`` .. note:: This method should not be relied", "note:: A number of situations prevent from freezing anything. When", "of task names that we can't afford to freeze when", "to a # number of problems that could appear after", "specific language governing permissions and # limitations under the License.", "be called from remote functions') closure_vars = { name: val", "the platform information to speed up the connection', [bool]), LevelKeyDesc('wait-boot',", "not rooted') cm = nullcontext elif not self.is_module_available('cgroups'): logger.warning('Could not", "states \"\"\" logger = self.get_logger() logger.info('Disabling idle states for all", "connect to.\") device_group = parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\", help=\"The ADB ID", "are 'sftp' and 'scp', defaults to sftp. 
if devlib_file_xfer and", "given a password, assume we can use it as a", "also contain a PlatformInfo YAML description. Note: only load trusted", "* file foo.yml:: target-conf: name: myboard * file bar.yml:: target-conf:", "to load the module if it's not loaded already, and", "adb upon target connection', [str, None]), PasswordKeyDesc('password', 'SSH password', [str,", "to use', [str]), KeyDesc('args', 'Keyword arguments to build the Platform", "form # and no specific value should be expected for", "in simulator_args) super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs ) # vim :set", "succeed if append_time: logger.info('Directory already exists, retrying ...') continue else:", "mutating them will not be reflected in the caller's context.", "global_vars.items() if name not in non_pickled } if modules: modules", "not_target_conf_opt }) # Some sanity check to get better error", "192.0.2.1 --username root --password <PASSWORD> In both cases, --conf can", "the board name from the target configuration so it becomes", "datetime will be appended to the given ``name``. If ``name``", "preserve that root so it can be relocated as the", "and that this module is in scope. If that is", "all created before we list them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES = {", "'devlib.platform.Platform subclass specification', ( KeyDesc('class', 'Name of the class to", "module is available. :returns: ``True`` if module is available, ``False``", "controllers = [s.name for s in self.cgroups.list_subsystems()] if 'freezer' not", "linux --host 192.0.2.1 --username root --password <PASSWORD> In both cases,", "the target source to it plat_info = copy.copy(plat_info) logger.info(f'User-defined platform", "# We want to freeze everything except PID 1, we", "kind. Can be \"linux\" (ssh) or \"android\" (adb)', [str]), KeyDesc('host',", ":param module: Devlib module to check. 
:type module: str ..", "that are not a key in TargetConf must be listed", "= self._res_dir.root relative = self._res_dir.relative else: root = self._res_dir relative", "'List of tools to install on the target', [TypedList[str]]), KeyDesc('lazy-platinfo',", "{ **global_vars, **kwargs, } # Treat the modules separately as", "code_str = self._get_code(f) def mktemp(): return self.execute( f'mktemp -p {shlex.quote(self.working_directory)}'", "_installed_tools, so we are sure to be correct for tool", "states, cpuidle devlib module is not loaded') cm = nullcontext", "information could be found: {e}') plat_info = None return cls.from_conf(conf=conf,", "kind == 'android' and username is None: try: target.adb_root(enable=True) except", "= TargetConf.from_yaml_map(args.conf) except (KeyError, ValueError): pass else: target_conf.add_src(args.conf, conf) target_conf.add_src('command-line',", "cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup() logger.info(f\"Connected to target {(name or '')}\") return", "self._installed_tools # TODO: compute the checksum of the tool +", "devlib_file_xfer not in ('scp', 'sftp'): raise ValueError(f'Invalid file transfer method:", "description. Note: only load trusted YAML files as it can", "keyfile: conn_settings['keyfile'] = keyfile else: conn_settings['password'] = password elif kind", "e else: err = None return parse_output(out_tempfiles, err) finally: for", "line arguments. :param argv: The list of arguments. ``sys.argv[1:]`` will", "is defined in the same module * referred to by", "environment. :param name: Name of the results directory :type name:", "Something else that does not exist ... 
else: raise def", "name='<noname>', tools=[], res_dir=None, plat_info=None, lazy_platinfo=False, workdir=None, device=None, host=None, port=None, username=None,", "True else: out_is_excep = False out = pickle.dumps(out) out_tempfile =", "res_dir os.makedirs(self._res_dir, exist_ok=True) if os.listdir(self._res_dir): raise ValueError(f'res_dir must be empty:", "nullcontext elif not self.is_module_available('cgroups'): logger.warning('Could not freeze userspace: \"cgroups\" devlib", "cm(): try: for cpu in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield finally: logger.info('Re-enabling", "# Determine file transfer method. Currently avaliable options # are", "# line arguments try: conf = TargetConf.from_yaml_map(args.conf) except (KeyError, ValueError):", "meaning and type: {generated_help} An instance can be created by", "arguments to build the Platform object', [Mapping]), )), KeyDesc('excluded-modules', 'List", "= os.environ['LISA_CONF'] return cls.from_one_conf(path) @classmethod def from_one_conf(cls, path): \"\"\" Create", "params: Dictionary of custom parameters to add to the parser.", "return script @staticmethod def _get_code(f): lines, _ = inspect.getsourcelines(f) #", "cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs = cls.conf_to_init_kwargs(conf) kwargs['res_dir'] = res_dir kwargs['plat_info'] =", "== 'linux': for required in ['host', 'username', 'password']: if required", "is frozen # for too long. 
'watchdogd', ] } \"\"\"", "self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True) logger.info(f'Effective platform information:\\n{self.plat_info}') @property @memoized def", "all the necessary connection information: $ {script} --conf my_target.yml Alternatively,", "connect=False, ) target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout) # None as username means adb", ":param params: Dictionary of custom parameters to add to the", "}, out_tempfiles=out_tempfiles ) cmd = ['python3', '-c', snippet] cmd =", "range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield finally: logger.info('Re-enabling idle states for all domains')", "return binary tools = set(tools) - self._installed_tools # TODO: compute", "FunctionType from operator import itemgetter import devlib from devlib.exception import", "so it can be relocated as the caller wants it", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Only keys defined below are allowed, with the given meaning", "if append_time: logger.info('Directory already exists, retrying ...') continue else: raise", "{dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup() logger.info(f\"Connected to target {(name or '')}\")", "'__wrapped__'): raise TypeError('Decorated functions cannot be called from remote functions')", "'SSH private key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to StrictHostKeyChecking", "'' script = textwrap.dedent(''' import pickle import sys def wrapper():", "SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable from lisa.generic import TypedList from lisa.platforms.platinfo", "logged but no exception is raised, so it's a best-effort", "that is installed on the target and that this module", "{devlib_file_xfer}') use_scp = devlib_file_xfer == 'scp' self._installed_tools = set() self.target", "devlib_file_xfer=None, wait_boot=True, 
wait_boot_timeout=10, ): # pylint: disable=dangerous-default-value super().__init__() logger =", "{(name or '')}\") return target def get_res_dir(self, name=None, append_time=True, symlink=True):", "for tool in map(bin_path, tools): self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager def freeze_userspace(self):", "out_tempfiles=out_tempfiles ) cmd = ['python3', '-c', snippet] cmd = '", "# Disallow decorated functions since their definition depends on #", "probably got an exception except EOFError: # pylint: disable=raise-missing-from try:", "parser.error('--host or --device must be specified') if args.kind == 'linux':", "**execute_kwargs): \"\"\" Executes the given Python function ``f`` with the", "should be used to ensure we get a unique name.", "configuration', ( # Using textual name of the Platform allows", "be parsed and produced by any other third-party code LevelKeyDesc('platform',", "name is mostly free form # and no specific value", "for linux targets)', [TypedList[str]]), )) )) DEFAULT_SRC = { 'devlib':", "self.execute( f'mktemp -p {shlex.quote(self.working_directory)}' ).strip() def read_output(path): with tempfile.TemporaryDirectory() as", "frozen # for too long. 'watchdogd', ] } \"\"\" Dictionary", "# Load the TargetConf from the file, and update it", "yaml file. Other options will override what is specified in", "that the # next time it will succeed if append_time:", "to finish booting', ( KeyDesc('enable', 'Enable the boot check', [bool]),", "= {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0] with open(out_tempfile, 'wb') as", "method. Can be \"sftp\" (default) or \"scp\". (Only valid for", "EXAMPLES --conf can point to a YAML target configuration file", "( getattr(cls, 'name', None) # early modules try to connect", "isinstance(var_dct, Mapping) for name, val in var_dct.items() } funcs[name] =", "top of it. 
{configurable_params} :param devlib_platform: Instance of :class:`devlib.platform.Platform` to", "self._res_dir.root relative = self._res_dir.relative else: root = self._res_dir relative =", "params or {} for param, settings in params.items(): parser.add_argument(f'--{param}', **settings)", "we were passed when adding the target source to it", "ExekallTaggable, memoized from lisa.assets import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf,", "YAML tags can be used in the configuration file: ..", "only load trusted YAML files as it can lead to", ".. code-block:: YAML target-conf: # \"!env:<type> ENV_VAR_NAME\" can be used", "the name, there is some hope that the # next", "ArtifactPath = None, plat_info: PlatformInfo = None) -> 'Target': cls.get_logger().info(f'Target", "will succeed if append_time: logger.info('Directory already exists, retrying ...') continue", "Linux kind.\") parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login password. Only applies to Linux", "or SSH keyfile if keyfile: conn_settings['keyfile'] = keyfile else: conn_settings['password']", "# early as possible conn_settings['adb_as_root'] = (username == 'root') elif", "[ 'init', # We want to freeze everything except PID", ") def add_func(f, name): # Disallow decorated functions since their", "we probably got an exception except EOFError: # pylint: disable=raise-missing-from", "empty, we probably got an exception except EOFError: # pylint:", "'linux', 'username': 'foo', 'password': '<PASSWORD>', }}) Or alternatively, from a", "KeyDesc('host', 'Hostname or IP address of the host', [str, None]),", "d: name = os.path.join(d, 'out') self.pull(path, name) with open(name, 'rb')", "import copy import sys import argparse import textwrap import functools", "logger.debug(f'Setting up {kind} target...') # If the target is Android,", "append_time=True, symlink=True ) self._res_dir = res_dir os.makedirs(self._res_dir, exist_ok=True) if os.listdir(self._res_dir):", "That will not forward special 
methods like __str__, since the", "= inspect.signature(f) kwargs = sig.bind(*args, **kwargs).arguments closure_vars = inspect.getclosurevars(f) name,", "execution. \"\"\" conf = TargetConf.from_yaml_map(path) try: plat_info = PlatformInfo.from_yaml_map(path) except", "computed at runtime. # Note: lisa.target.Gem5SimulationPlatformWrapper should be used instead", "= ['python3', '-c', snippet] cmd = ' '.join(map(shlex.quote, cmd)) try:", "can also contain a PlatformInfo YAML description. Note: only load", "out of the configuration file devlib_platform_conf = conf['devlib']['platform'] devlib_platform_cls =", "'conf', } not_target_conf_opt.update(custom_params) args = parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf = TargetConf()", "devlib module is not loaded') cm = nullcontext else: @contextlib.contextmanager", "kwargs=', '.join( f'{name}={name}' for name in kwargs.keys() ) ) return", "if kind == 'android' and username is None: try: target.adb_root(enable=True)", "lisa.target.Gem5SimulationPlatformWrapper should be used instead # of the original one", "[s.name for s in self.cgroups.list_subsystems()] if 'freezer' not in controllers:", "device = 'DEFAULT' conn_settings['device'] = device # If the username", "speed up the connection', [bool]), LevelKeyDesc('wait-boot', 'Wait for the target", "f\"--dtb {system['dtb']}\", f\"--disk-image {system['disk']}\" )) diod_path = which('diod') if diod_path", "KeyDesc('keyfile', 'SSH private key file', [str, None]), KeyDesc('strict-host-check', 'Equivalent to", "| set(dir(self.target)) | self._devlib_loadable_modules return sorted(attrs) @classmethod def from_conf(cls, conf:", "Closure variables are supported, but mutating them will not be", "allow inlining of functions defined in the same module so", "None]), KeyDesc('device', 'ADB device. 
Takes precedence over \"host\"', [str, None]),", "'' return self._get_res_dir( root=root, relative=relative, name=name, append_time=append_time, symlink=symlink, ) def", "else: raise ValueError(f'Unsupported platform type {kind}') settings = '\\n '.join(", "alternatively, from a YAML configuration file: Content of target_conf.yml: ..", "it can lead to abritrary code execution. \"\"\" path =", "the target. if kind == 'android' and username is None:", "same module * referred to by name, assuming it comes", "devlib_platform = devlib.platform.Platform() ######################################################################## # Create devlib Target object ########################################################################", "elif kind == 'host': devlib_target_cls = devlib.LocalLinuxTarget # If we", "None: raise RuntimeError('Failed to find \"diod\" on your host machine,", "with contextlib.suppress(KeyError, ValueError): platform_info = PlatformInfo.from_yaml_map(args.conf) # Load the TargetConf", "be pickled modules = { name: mod for name, mod", "features aren't needed. .. note:: The wrapping of :class:`devlib.target.Target` is", "plat_info = None return cls.from_conf(conf=conf, plat_info=plat_info) @classmethod # Keep the", "KeyDesc('enable', 'Enable the boot check', [bool]), KeyDesc('timeout', 'Timeout of the", "is installed on the target and that this module is", "PlatformInfo yaml file. 
Other options will override what is specified", "f_kwargs, **kwargs) return wrapper return wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self,", "username, password, keyfile, strict_host_check, use_scp, devlib_platform, wait_boot, wait_boot_timeout, ): \"\"\"", "defined in the same module * referred to by name,", "from_one_conf(cls, path): \"\"\" Create a :class:`Target` from a single YAML", "value should be expected for a given kind of board", "DEFAULT_SRC = { 'devlib': { 'platform': { 'class': 'devlib.platform.Platform' }", "f\"{name}-{time_str}\" # If we were given an ArtifactPath with an", "= cls.conf_to_init_kwargs(conf) kwargs['res_dir'] = res_dir kwargs['plat_info'] = plat_info # Create", "trying to create # the link with contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk)", "it's not loaded already, and bail out if it fails", "\"password\" or \"keyfile\". All other fields are optional if the", "provide the information needed to connect to the target. For", "'android': devlib_target_cls = devlib.AndroidTarget # Workaround for ARM-software/devlib#225 workdir =", "= f.__name__ return (name, code_str) def execute_python(self, f, args, kwargs,", "needed to connect to the target. For SSH targets that", ")), LevelKeyDesc('devlib', 'devlib configuration', ( # Using textual name of", "for required in ['host', 'username', 'password']: if required not in", "embelish logs', [str]), KeyDesc('kind', 'Target kind. Can be \"linux\" (ssh)", "\"\"\" Create a Target from command line arguments. 
:param argv:", "param, settings in params.items(): parser.add_argument(f'--{param}', **settings) custom_params = {k.replace('-', '_')", "if strict_host_check is None else strict_host_check, use_scp=False if use_scp is", "List our attributes plus the ones from the underlying target,", "The list of names of tools to install :type tools:", "use_scp, devlib_platform, wait_boot, wait_boot_timeout, ): \"\"\" Initialize the Target \"\"\"", "the checksum of the tool + install location and keep", "PORT .. note:: Only load trusted YAML files as it", "is not rooted') cm = nullcontext elif not self.is_module_available('cgroups'): logger.warning('Could", "comes from a module that is installed on the target", ") parser.add_argument(\"--kind\", \"-k\", choices=[\"android\", \"linux\", \"host\"], help=\"The kind of target", "is allowed and will work: * file foo.yml:: target-conf: name:", "with ``devlib`` internal members. \"\"\" ADB_PORT_DEFAULT = 5555 SSH_PORT_DEFAULT =", "from a single YAML configuration file. This file will be", "kind == 'android': devlib_target_cls = devlib.AndroidTarget # Workaround for ARM-software/devlib#225", "set along the relevant credentials: $ {script} --kind linux --host", "provided configuration in order to run a test. EXAMPLES --conf", "excep = paths try: return read_output(val) # If the file", "if key != 'password' ) logger.debug(f'{kind} {name} target connection settings:\\n", "not use any python-specific YAML tags, so TargetConf files can", "find \"diod\" on your host machine, check your installation or", "required') if args.kind == 'android': if ('host' not in target_conf)", "and # limitations under the License. 
# from datetime import", "will attempt to load the module if it's not loaded", "for name, val in var_dct.items() } funcs[name] = (f, cls._get_code(f)[1])", "root so it can be relocated as the caller wants", "== 'android' and username is None: try: target.adb_root(enable=True) except Exception", "or any of the connection options is required') if args.kind", "\"\"\" Install tools additional to those specified in the test", "simulator_args = copy.copy(simulator.get('args', [])) system_platform = system['platform'] # Get gem5", "'transport', # We don't actually need this task but on", ").strip() def read_output(path): with tempfile.TemporaryDirectory() as d: name = os.path.join(d,", "targets)', [TypedList[str]]), )) )) DEFAULT_SRC = { 'devlib': { 'platform':", "plat_info # Create a devlib Platform instance out of the", "params = params or {} for param, settings in params.items():", "wrapper(*f_args, **f_kwargs): return self.execute_python(f, f_args, f_kwargs, **kwargs) return wrapper return", "f.write(out) ''').format( f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n' + ' ' * 4),", "configuration file: .. code-block:: YAML target-conf: # \"!env:<type> ENV_VAR_NAME\" can", "inspection. That board name is mostly free form # and", "freeze to death and a machine hard reboot will be", "existing root, we # preserve that root so it can", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "= devlib.platform.Platform() ######################################################################## # Create devlib Target object ######################################################################## target", "self.get_logger() self.name = name res_dir = res_dir if res_dir else", "note:: Devlib modules are loaded on demand when accessed. 
\"\"\"", "# one we were passed when adding the target source", "STRUCTURE = TopLevelKeyDesc('target-conf', 'target connection settings', ( KeyDesc('name', 'Board name,", "name of the Platform allows this YAML configuration # to", "username was explicitly set to \"root\", root the target as", "conn_settings.items() if key != 'password' ) logger.debug(f'{kind} {name} target connection", "since the interpreter bypasses __getattr__ when looking them up. ..", "code-block:: YAML target-conf: # \"!env:<type> ENV_VAR_NAME\" can be used to", "exception, raise the initial one # from devlib except EOFError:", "'linux': for required in ['host', 'username', 'password']: if required not", "otherwise we # will freeze to death and a machine", "you may # not use this file except in compliance", "('host' not in target_conf) and ('device' not in target_conf): parser.error('--host", "keyfile if keyfile: conn_settings['keyfile'] = keyfile else: conn_settings['password'] = password", "- devlib_excluded_modules # Initialize binary tools to deploy if tools:", "target \"\"\" def wrapper_param(f): @functools.wraps(f) def wrapper(*f_args, **f_kwargs): return self.execute_python(f,", "logger = self.get_logger() if not self.is_rooted: logger.warning('Could not freeze userspace:", "be set along the relevant credentials: $ {script} --kind linux", "if not devlib_platform: devlib_platform = devlib.platform.Platform() ######################################################################## # Create devlib", "caller wants it res_dir = ArtifactPath(root, os.path.join(relative, name)) # Compute", "in out_tempfiles: self.remove(path) def remote_func(self, **kwargs): \"\"\" Decorates a given", "be correct for tool in map(bin_path, tools): self.target.install(tool) self._installed_tools.add(tool) @contextlib.contextmanager", "of de-freezing, otherwise we # will freeze to death and", "note:: That structure in a YAML file is allowed and", "to: # 
https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d /run/systemd/system/', check_exit_code=True) except TargetStableError: return", "either express or implied. # See the License for the", "has been excluded explicitly elif attr in _DEVLIB_AVAILABLE_MODULES: # pylint:", "A number of situations prevent from freezing anything. When that", "def cm(): logger.info(f\"Freezing all tasks except: {','.join(exclude)}\") try: yield self.cgroups.freeze(exclude)", "all idle states \"\"\" logger = self.get_logger() logger.info('Disabling idle states", "eval() global_vars = { **global_vars, **kwargs, } # Treat the", "= inspect.getsourcelines(f) # Remove decorators, as they are either undefined", "res_lnk = os.path.join(LISA_HOME, LATEST_LINK) with contextlib.suppress(FileNotFoundError): os.remove(res_lnk) # There may", "else: return True def is_module_available(self, module): \"\"\" Check if the", "modules to *not* load', [TypedList[str]]), KeyDesc('file-xfer', 'File transfer method. Can", "target-conf: name: myboard * file bar.yml:: target-conf: !include foo.yml This", "return '<password>' # Make sure all submodules of devlib.module are", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "pickle.dumps(out) out_tempfile = {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0] with open(out_tempfile,", ":: TargetConf.from_yaml_map('target_conf.yml') The following special YAML tags can be used", "instances. .. note:: Only load trusted YAML files as it", "directory managed by LISA to store results. Usage of that", "we get a unique name. 
try: os.makedirs(res_dir) break except FileExistsError:", "try: target.adb_root(enable=True) except Exception as e: # pylint: disable=broad-except logger.warning(f'\"adb", "applies to Linux kind.\") parser.add_argument(\"--log-level\", default='info', choices=('warning', 'info', 'debug'), help=\"Verbosity", "required if isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager def cm(): logger.info(f\"Freezing all", "######################################################################## # Create devlib Target object ######################################################################## target = devlib_target_cls(", "with the given meaning and type: {generated_help} An instance can", "install :type tools: list(str) \"\"\" def bin_path(tool): binary = os.path.join(ASSETS_PATH,", "only be # computed when actually needed. rta_calib_res_dir = ArtifactPath.join(self._res_dir,", "Remove decorators, as they are either undefined or just were", "config 'tools' field :param tools: The list of names of", "``devlib`` internal members. \"\"\" ADB_PORT_DEFAULT = 5555 SSH_PORT_DEFAULT = 22", "to a YAML target configuration file with all the necessary", "that as the created folder will not be tracked by", "created results directory :type symlink: bool \"\"\" if isinstance(self._res_dir, ArtifactPath):", "def mktemp(): return self.execute( f'mktemp -p {shlex.quote(self.working_directory)}' ).strip() def read_output(path):", "\"root\" username will root adb upon target connection', [str, None]),", "and # Only allow inlining of functions defined in the", "is Android, we need just (eventually) the device if kind", "to this target, for the benefits of user code. :type", "out_tempfile = {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0] with open(out_tempfile, 'wb')", "... 
else: raise def __dir__(self): \"\"\" List our attributes plus", "'platform_info', 'log_level', 'res_dir', 'conf', } not_target_conf_opt.update(custom_params) args = parser.parse_args(argv) setup_logging(level=args.log_level.upper())", "key, val in conn_settings.items() if key != 'password' ) logger.debug(f'{kind}", "interactive use or simple scripts. Tests should not rely on", "root\" failed: {e}') logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup() logger.info(f\"Connected", "use_scp is None else use_scp, ) # Configure password or", "function is reserved to interactive use or simple scripts. Tests", "YAML configuration file: Content of target_conf.yml: .. literalinclude:: ../target_conf.yml :language:", "unrooted=password is None, password=password, ) else: raise ValueError(f'Unsupported platform type", "target_conf): parser.error('--host or --device must be specified') if args.kind ==", "cm = nullcontext else: @contextlib.contextmanager def cm(): try: for cpu", "if isinstance(self._res_dir, ArtifactPath): root = self._res_dir.root relative = self._res_dir.relative else:", "RuntimeError('Failed to find \"diod\" on your host machine, check your", "[str]), KeyDesc('kind', 'Target kind. Can be \"linux\" (ssh) or \"android\"", "= self.get_logger() conn_settings = {} resolved_username = username or 'root'", "if issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir) # Actually build the devlib", ".. 
note:: Only load trusted YAML files as it can", "ValueError(f'\"{module}\" is not a devlib module') try: getattr(self, module) except", "sorted(attrs) @classmethod def from_conf(cls, conf: TargetConf, res_dir: ArtifactPath = None,", "inline the whole world, which could lead to a #", "else: root = self._res_dir relative = '' return self._get_res_dir( root=root,", "we cannot control if hasattr(f, '__wrapped__'): raise TypeError('Decorated functions cannot", "used instead # of the original one to benefit from", "to Linux kind.\") parser.add_argument(\"--log-level\", default='info', choices=('warning', 'info', 'debug'), help=\"Verbosity level", "return get() # If it was not in the loadable", "@classmethod # Keep the signature without *args and **kwargs so", "workdir', [str]), KeyDesc('tools', 'List of tools to install on the", "options}}``. :type params: dict(str, dict) :return: A tuple ``(args, target)``", "required not in target_conf: parser.error(f'--{required} must be specified') custom_args =", "target using the provided configuration in order to run a", "add to the parser. It is in the form of", "use or simple scripts. Tests should not rely on that", "in the file.\" ) parser.add_argument(\"--kind\", \"-k\", choices=[\"android\", \"linux\", \"host\"], help=\"The", "simple scripts. Tests should not rely on that as the", "tools: list(str) \"\"\" def bin_path(tool): binary = os.path.join(ASSETS_PATH, 'binaries', self.abi,", "else: raise excep out_tempfiles = tuple() try: out_tempfiles = (mktemp(),", "options # are 'sftp' and 'scp', defaults to sftp. if", "runtime. # Note: lisa.target.Gem5SimulationPlatformWrapper should be used instead # of", "don't actually need this task but on Google Pixel it", "excluded explicitly elif attr in _DEVLIB_AVAILABLE_MODULES: # pylint: disable=raise-missing-from raise", "looking them up. .. 
note:: Devlib modules are loaded on", "resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext, ExekallTaggable,", "configuration file pointed by ``LISA_CONF`` environment variable. .. note:: Only", "for var_dct in inspect.getclosurevars(f) if isinstance(var_dct, Mapping) for name, val", "classes # are all created before we list them import_all_submodules(devlib.module)", "don't want to let # sysmted-journald or systemd-timesyncd running. 'systemd[^-]',", "list of names of tools to install :type tools: list(str)", "should not be relied upon to implement long-term scripts, it's", "isinstance(self._res_dir, ArtifactPath): root = self._res_dir.root relative = self._res_dir.relative else: root", "name # anyway, and it's restricted to the wrapper using", "must be specified') if args.kind == 'linux': for required in", "and :class:`lisa.platforms.platinfo.PlatformInfo` instances. .. note:: Only load trusted YAML files", "there is some hope that the # next time it", "wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules = set(devlib_excluded_modules) # Sorry, can't let you", "= { 'linux': [ 'init', # We want to freeze", "form of ``{param_name: {dict of ArgumentParser.add_argument() options}}``. :type params: dict(str,", "manager that lets you disable all idle states \"\"\" logger", "to a target using the provided configuration in order to", "def get_tags(self): return {'board': self.name} @classmethod def _make_remote_snippet(cls, name, code_str,", ") devlib_excluded_modules = set(devlib_excluded_modules) # Sorry, can't let you do", "strict_host_check, use_scp=False if use_scp is None else use_scp, ) #", "to provide a :class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo` instances. .. note:: Only", "Superseeds --host. Only applies to Android kind.\") device_group.add_argument(\"--host\", \"-n\", help=\"The", "\"\"\" Check if the given devlib module is available. 
:returns:", "attempted, but failure will # not prevent from connecting to", "logger.info(f'Tools to install: {tools}') self.install_tools(tools) # Autodetect information from the", "modules = f\"import {', '.join(sorted(modules))}\" else: modules = '' script", "from command line arguments. :param argv: The list of arguments.", "LevelKeyDesc('devlib', 'devlib configuration', ( # Using textual name of the", "# Copyright (C) 2018, ARM Limited and contributors. # #", "be used.\") params = params or {} for param, settings", "\"!env:<type> ENV_VAR_NAME\" can be used to reference an # environment", "a race condition with another tool trying to create #", "conn_settings.update( unrooted=password is None, password=password, ) else: raise ValueError(f'Unsupported platform", "used to embelish logs', [str]), KeyDesc('kind', 'Target kind. Can be", "in controllers: logger.warning('Could not freeze userspace: freezer cgroup controller not", "module * referred to by name, assuming it comes from", "settings:\\n {settings}') ######################################################################## # Devlib Platform configuration ######################################################################## if not", "devlib_target_cls = devlib.LocalLinuxTarget # If we are given a password,", "' * 4), modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=', '.join( f'{name}={name}' for", "connected :class:`Target` \"\"\" _, target = cls.from_custom_cli(argv=argv, params=params) return target", "Wrap :class:`devlib.target.Target` to provide additional features on top of it.", "cpuidle.enable_all(cpu) with cm() as x: yield x def get_tags(self): return", "target. For SSH targets that means \"host\", \"username\" and either", "'thermal-engine', # Similar issue with HiKey960, the board will crash", "abritrary code execution. .. 
note:: That structure in a YAML", "imported so the classes # are all created before we", "import shlex from collections.abc import Mapping import copy import sys", "exist_ok=True) if os.listdir(self._res_dir): raise ValueError(f'res_dir must be empty: {self._res_dir}') if", "Juno board might be named \"foo-bar-juno-on-my-desk\") if name: self.plat_info.add_src('target-conf', dict(name=name))", "resolved_username = username or 'root' logger.debug(f'Setting up {kind} target...') #", "In both cases, --conf can also contain a PlatformInfo YAML", "interpreter on the target .. note:: Closure variables are supported,", "definition depends on # external callable we cannot control if", "caller's context. Also, functions that are referred to will be:", "return get() except AttributeError: # Load the module on demand", "bypasses __getattr__ when looking them up. .. note:: Devlib modules", "path in out_tempfiles: self.remove(path) def remote_func(self, **kwargs): \"\"\" Decorates a", "Quote/escape arguments and build the command line gem5_args = '", "y): return x + y # Execute the function on", "{settings}') ######################################################################## # Devlib Platform configuration ######################################################################## if not devlib_platform:", "relative, name, append_time, symlink): logger = self.get_logger() while True: time_str", "try: conf = TargetConf.from_yaml_map(args.conf) except (KeyError, ValueError): pass else: target_conf.add_src(args.conf,", "**closure_vars.globals, **closure_vars.nonlocals, }, out_tempfiles=out_tempfiles ) cmd = ['python3', '-c', snippet]", "We want to freeze everything except PID 1, we don't", "target. Superseeds --host. 
Only applies to Android kind.\") device_group.add_argument(\"--host\", \"-n\",", "plat_info=plat_info) @classmethod # Keep the signature without *args and **kwargs", "f, args, kwargs, **execute_kwargs): \"\"\" Executes the given Python function", ") cmd = ['python3', '-c', snippet] cmd = ' '.join(map(shlex.quote,", "= nullcontext elif not self.is_module_available('cgroups'): logger.warning('Could not freeze userspace: \"cgroups\"", "devlib_excluded_modules = set(devlib_excluded_modules) # Sorry, can't let you do that.", "available, ``False`` otherwise. :param module: Devlib module to check. :type", "self.cgroups.freeze(exclude) finally: logger.info('Un-freezing userspace tasks') self.cgroups.freeze(thaw=True) with cm() as x:", "arguments: Forwarded to :meth:`execute` that will spawn the Python interpreter", "Configure password or SSH keyfile if keyfile: conn_settings['keyfile'] = keyfile", "information attached to this target, for the benefits of user", "= None if args.conf: # Tentatively load a PlatformInfo from", "both cases, --conf can also contain a PlatformInfo YAML description.", "directory :type symlink: bool \"\"\" if isinstance(self._res_dir, ArtifactPath): root =", "as a special case:: target-conf: target-conf: name: myboard \"\"\" STRUCTURE", "devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {})) # Hack for Gem5 devlib Platform,", "# If we were given an ArtifactPath with an existing", "disable=broad-except err = e else: err = None return parse_output(out_tempfiles,", "eval(wrapper.__code__, pickle.loads({globals})) except BaseException as e: out = e out_is_excep", "controllers: logger.warning('Could not freeze userspace: freezer cgroup controller not available", "before we list them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES = { cls.name for", "None, plat_info: PlatformInfo = None) -> 'Target': cls.get_logger().info(f'Target configuration:\\n{conf}') kwargs", "HideExekallID, resolve_dotted_name, get_subclasses, 
import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK, setup_logging, ArtifactPath, nullcontext,", "capabilities. :return: A connected :class:`Target` \"\"\" _, target = cls.from_custom_cli(argv=argv,", "**kwargs): \"\"\" Decorates a given function to execute remotely using", "state gets stuck in FREEZING if we # try to", "(username == 'root') elif kind == 'linux': devlib_target_cls = devlib.LinuxTarget", "# for too long. 'watchdogd', ] } \"\"\" Dictionary mapping", "return self.execute( f'mktemp -p {shlex.quote(self.working_directory)}' ).strip() def read_output(path): with tempfile.TemporaryDirectory()", "tools = set(tools) - self._installed_tools # TODO: compute the checksum", "val for var_dct in inspect.getclosurevars(f) if isinstance(var_dct, Mapping) for name,", "is handled as a special case:: target-conf: target-conf: name: myboard", "whole world, which could lead to a # number of", "if os.listdir(self._res_dir): raise ValueError(f'res_dir must be empty: {self._res_dir}') if plat_info", "k not in not_target_conf_opt }) # Some sanity check to", "val in conn_settings.items() if key != 'password' ) logger.debug(f'{kind} {name}", "argv: list(str) :param params: Dictionary of custom parameters to add", "self.cpuidle except AttributeError: logger.warning('Could not disable idle states, cpuidle devlib", "= foo(1, y=2) :Variable keyword arguments: Forwarded to :meth:`execute` that", "from_conf(cls, conf: TargetConf, res_dir: ArtifactPath = None, plat_info: PlatformInfo =", "not freeze userspace: target is not rooted') cm = nullcontext", "managed by LISA to store results. Usage of that function", "use this file except in compliance with the License. #", "that: # 1. there is no name clash risk #", "args.kind == 'linux': for required in ['host', 'username', 'password']: if", "arguments. 
The return value or any exception is pickled back", "res_dir = res_dir if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='',", "datetime import os import os.path import contextlib import shlex from", "of problems that could appear after another module # is", "ADB server port', [int, None]), KeyDesc('device', 'ADB device. Takes precedence", "not loaded already, and bail out if it fails to", "to let the output dir be created automatically virtio_args =", "= port or self.ADB_PORT_DEFAULT device = f'{host}:{port}' else: device =", "devlib.utils.misc import which from devlib.platform.gem5 import Gem5SimulationPlatform from lisa.utils import", "None) # early modules try to connect to UART and", ":class:`devlib.target.Target` to provide additional features on top of it. {configurable_params}", "host: port = port or self.ADB_PORT_DEFAULT device = f'{host}:{port}' else:", "domains') for cpu in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with cm() as x:", "if out_is_excep else {out_tempfiles}[0] with open(out_tempfile, 'wb') as f: f.write(out)", "return getattr(self.target, attr) try: return get() except AttributeError: # Load", "special case:: target-conf: target-conf: name: myboard \"\"\" STRUCTURE = TopLevelKeyDesc('target-conf',", "root = self._res_dir.root relative = self._res_dir.relative else: root = self._res_dir", "--device must be specified') if args.kind == 'linux': for required", "simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', [])) simulator_args.extend(( f\"--kernel {system['kernel']}\", f\"--dtb {system['dtb']}\", f\"--disk-image", "cls.from_custom_cli(argv=argv, params=params) return target @classmethod def from_custom_cli(cls, argv=None, params=None): \"\"\"", "root = self._res_dir relative = '' return self._get_res_dir( root=root, relative=relative,", "override what is specified in the file.\" ) 
parser.add_argument(\"--kind\", \"-k\",", "cls.name for cls in get_subclasses(devlib.module.Module) if ( getattr(cls, 'name', None)", "the Platform object', [Mapping]), )), KeyDesc('excluded-modules', 'List of devlib modules", "= devlib.LinuxTarget conn_settings.update( username=resolved_username, port=port or self.SSH_PORT_DEFAULT, host=host, strict_host_check=True if", "used to ensure we get a unique name. try: os.makedirs(res_dir)", "+ install location and keep # that in _installed_tools, so", "err) finally: for path in out_tempfiles: self.remove(path) def remote_func(self, **kwargs):", "def cm(): try: for cpu in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield finally:", "they cannot be pickled modules = { name: mod for", "(Only valid for linux targets)', [TypedList[str]]), )) )) DEFAULT_SRC =", "{ param: value for param, value in vars(args).items() if param", "y # Execute the function on the target transparently val", "logger.warning('Could not freeze userspace: target is not rooted') cm =", "If ``name`` is None, the directory name will be the", "res_dir) # Actually build the devlib Platform object devlib_platform =", "path logger.info(f'Creating result directory: {res_dir}') # It will fail if", "of the Platform allows this YAML configuration # to not", "All other fields are optional if the relevant features aren't", "Workaround for ARM-software/devlib#225 workdir = workdir or '/data/local/tmp/devlib-target' if device:", "port or self.ADB_PORT_DEFAULT device = f'{host}:{port}' else: device = 'DEFAULT'", "and PlatformInfo yaml file. Other options will override what is", "= pickle.dumps(out) out_tempfile = {out_tempfiles}[1] if out_is_excep else {out_tempfiles}[0] with", "SSH_PORT_DEFAULT = 22 CRITICAL_TASKS = { 'linux': [ 'init', #", "used to reference an # environment variable. name: !env:str BOARD_NAME", "PID 1, we don't want to let # sysmted-journald or", "single YAML configuration file. This file will be used to", "the userspace. .. 
note:: A number of situations prevent from", "'List of devlib modules to *not* load', [TypedList[str]]), KeyDesc('file-xfer', 'File", "else use_scp, ) # Configure password or SSH keyfile if", "password elif kind == 'host': devlib_target_cls = devlib.LocalLinuxTarget # If", "'tools' field :param tools: The list of names of tools", "lead to a # number of problems that could appear", "import os.path import contextlib import shlex from collections.abc import Mapping", "defaults to sftp. if devlib_file_xfer and devlib_file_xfer not in ('scp',", "\"keyfile\". All other fields are optional if the relevant features", "will crash if this is frozen # for too long.", "is not f and can_include(_f): add_func(_f, _name) modules.update( (name, mod)", "PlatformInfo() else: # Make a copy of the PlatformInfo so", "= sig.bind(*args, **kwargs).arguments closure_vars = inspect.getclosurevars(f) name, code_str = self._get_code(f)", "module: str .. note:: This will attempt to load the", "module): \"\"\" Check if the given devlib module is available.", "in compliance with the License. # You may obtain a", "# Devlib Platform configuration ######################################################################## if not devlib_platform: devlib_platform =", "their definition depends on # external callable we cannot control", "the results directory :type name: str :param append_time: If True,", "copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do not freeze the process in charge of", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "Make sure all submodules of devlib.module are imported so the", "in the host caller. :Variable keyword arguments: Forwarded to :meth:`execute`", "``True`` if module is available, ``False`` otherwise. :param module: Devlib", "additional features on top of it. {configurable_params} :param devlib_platform: Instance", "the provided configuration in order to run a test. 
EXAMPLES", "__dir__(self): \"\"\" List our attributes plus the ones from the", "if the given devlib module is available. :returns: ``True`` if", "Devlib module to check. :type module: str .. note:: This", "to use to build the :class:`devlib.target.Target` :type devlib_platform: devlib.platform.Platform :param", "the configuration file devlib_platform_conf = conf['devlib']['platform'] devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs", "--kind linux --host 192.0.2.1 --username root --password <PASSWORD> In both", "free form # and no specific value should be expected", "disable=broad-except return False else: return True def __getattr__(self, attr): \"\"\"", "else: return True def __getattr__(self, attr): \"\"\" Forward all non-overriden", "with tempfile.TemporaryDirectory() as d: name = os.path.join(d, 'out') self.pull(path, name)", "the YAML configuration file pointed by ``LISA_CONF`` environment variable. ..", "\"-o\", help=\"Result directory of the created Target. If no directory", "**kwargs, } # Treat the modules separately as they cannot", "specified in the test config 'tools' field :param tools: The", "devlib except EOFError: raise err if err is not None", "deferred so they will only be # computed when actually", "Forward all non-overriden attributes/method accesses to the underlying :class:`devlib.target.Target`. ..", "LevelKeyDesc, TopLevelKeyDesc,Configurable from lisa.generic import TypedList from lisa.platforms.platinfo import PlatformInfo", ".. note:: This method should not be relied upon to", "Get gem5 binary arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', [])) simulator_args.extend(( f\"--kernel", "not a devlib module') try: getattr(self, module) except Exception: #", "becomes # available for later inspection. That board name is", "\"host\", \"username\" and either \"password\" or \"keyfile\". 
All other fields", "= _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules # Initialize binary tools to deploy", "textwrap.dedent(''.join(lines)) name = f.__name__ return (name, code_str) def execute_python(self, f,", "used if this is ``None``. :type argv: list(str) :param params:", "can be used to reference an # environment variable. name:", "exact class used under the hood, and avoids messing up", "default='info', choices=('warning', 'info', 'debug'), help=\"Verbosity level of the logs.\") parser.add_argument(\"--res-dir\",", "on the target \"\"\" def wrapper_param(f): @functools.wraps(f) def wrapper(*f_args, **f_kwargs):", "the classes # are all created before we list them", "# Brackets are there to let the output dir be", "22 CRITICAL_TASKS = { 'linux': [ 'init', # We want", "can lead to abritrary code execution. \"\"\".format( script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\",", "the whole world, which could lead to a # number", "lisa.generic import TypedList from lisa.platforms.platinfo import PlatformInfo class PasswordKeyDesc(KeyDesc): def", "def bin_path(tool): binary = os.path.join(ASSETS_PATH, 'binaries', self.abi, tool) if not", "and getattr(cls, 'stage') != 'early' ) } class TargetConf(SimpleMultiSrcConf, HideExekallID):", "early as possible conn_settings['adb_as_root'] = (username == 'root') elif kind", "None]), KeyDesc('workdir', 'Remote target workdir', [str]), KeyDesc('tools', 'List of tools", ":param tools: The list of names of tools to install", "if it's not loaded already, and bail out if it", "else: controllers = [s.name for s in self.cgroups.list_subsystems()] if 'freezer'", "is done using composition, as opposed to inheritance. 
This allows", "{modules} {code} return {f}({kwargs}) try: out = eval(wrapper.__code__, pickle.loads({globals})) except", "let the output dir be created automatically virtio_args = [", "below are allowed, with the given meaning and type: {generated_help}", "gem5_args = ' '.join(shlex.quote(a) for a in simulator_args) super().__init__( gem5_args=gem5_args,", "ValueError): platform_info = PlatformInfo.from_yaml_map(args.conf) # Load the TargetConf from the", "from lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR,", "= f\"{name}-{time_str}\" # If we were given an ArtifactPath with", "return res_dir def install_tools(self, tools): \"\"\" Install tools additional to", "a best-effort approach. \"\"\" logger = self.get_logger() if not self.is_rooted:", "a YAML configuration file: Content of target_conf.yml: .. literalinclude:: ../target_conf.yml", "devlib_platform: devlib_platform = devlib.platform.Platform() ######################################################################## # Create devlib Target object", "finally: logger.info('Re-enabling idle states for all domains') for cpu in", "could appear after another module # is updated or so.", "set() self.target = self._init_target( kind=kind, name=name, workdir=workdir, device=device, host=host, port=port,", "point to a YAML target configuration file with all the", "\"\"\" CONF_CLASS = TargetConf INIT_KWARGS_KEY_MAP = { 'devlib_excluded_modules': ['devlib', 'excluded-modules'],", "@classmethod def _make_remote_snippet(cls, name, code_str, module, kwargs, global_vars, out_tempfiles): #", "freeze userspace: target is not rooted') cm = nullcontext elif", "if this is ``None``. :type argv: list(str) :param params: Dictionary", "ID of the target. Superseeds --host. 
Only applies to Android", "computations are deferred so they will only be # computed", "conf['devlib']['platform'] devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {})) # Hack", "'--workload-automation-vio={}', ] simulator_args.extend(virtio_args) # Quote/escape arguments and build the command", "could be found: {e}') plat_info = None return cls.from_conf(conf=conf, plat_info=plat_info)", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "ArtifactPath.join(self._res_dir, 'rta_calib') os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True) logger.info(f'Effective platform information:\\n{self.plat_info}')", "race condition with another tool trying to create # the", "cm = nullcontext else: exclude = copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do not", ":class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo` instances. .. note:: Only load trusted YAML", "store results. Usage of that function is reserved to interactive", "lisa.assets import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable", "'.join(shlex.quote(a) for a in simulator_args) super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs )", "connecting to the target. 
if kind == 'android' and username", "code_str, module, kwargs, global_vars, out_tempfiles): # Inject the parameters inside", "root, relative, name, append_time, symlink): logger = self.get_logger() while True:", "one we were passed when adding the target source to", "avaliable options # are 'sftp' and 'scp', defaults to sftp.", "_name) modules.update( (name, mod) for name, mod in closure_vars.items() if", "isinstance(mod, ModuleType) ) funcs = {} for f_name, f in", "ADB_PORT_DEFAULT = 5555 SSH_PORT_DEFAULT = 22 CRITICAL_TASKS = { 'linux':", "Or alternatively, from a YAML configuration file: Content of target_conf.yml:", "path = os.environ['LISA_CONF'] return cls.from_one_conf(path) @classmethod def from_one_conf(cls, path): \"\"\"", "is None, password=password, ) else: raise ValueError(f'Unsupported platform type {kind}')", "from the underlying target, and the devlib modules that could", "set(dir(self.target)) | self._devlib_loadable_modules return sorted(attrs) @classmethod def from_conf(cls, conf: TargetConf,", "if plat_info is None: plat_info = PlatformInfo() else: # Make", "`target-conf` key is not needed here: .. code-block:: python TargetConf({{", "self.install_tools(tools) # Autodetect information from the target, after the Target", "A tuple ``(args, target)`` .. note:: This method should not", "designed for quick scripting. 
\"\"\" parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent(", "us lines = [ line for line in lines if", "with cm() as x: yield x @contextlib.contextmanager def disable_idle_states(self): \"\"\"", "{attr} was explicitly excluded, not loading it') # Something else", "not self.is_rooted: logger.warning('Could not freeze userspace: target is not rooted')", "# Similar issue with HiKey960, the board will crash if", "or value returned by the function') else: raise excep out_tempfiles", "if isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager def cm(): logger.info(f\"Freezing all tasks", "self._res_dir.relative else: root = self._res_dir relative = '' return self._get_res_dir(", "ValueError(f'Invalid file transfer method: {devlib_file_xfer}') use_scp = devlib_file_xfer == 'scp'", "simulator_args.extend(( f\"--kernel {system['kernel']}\", f\"--dtb {system['dtb']}\", f\"--disk-image {system['disk']}\" )) diod_path =", "'out') self.pull(path, name) with open(name, 'rb') as f: return pickle.loads(f.read())", "logger.info(f'Effective platform information:\\n{self.plat_info}') @property @memoized def _uses_systemd(self): try: # Check", "with the License. # You may obtain a copy of", "wait_boot_timeout, ): \"\"\" Initialize the Target \"\"\" logger = self.get_logger()", "module') try: getattr(self, module) except Exception: # pylint: disable=broad-except return", "name, mod in global_vars.items() if isinstance(mod, ModuleType) } def can_include(f):", "\"host_output_dir\" # argument computed at runtime. 
# Note: lisa.target.Gem5SimulationPlatformWrapper should", "might be named \"foo-bar-juno-on-my-desk\") if name: self.plat_info.add_src('target-conf', dict(name=name)) # Determine", "{configurable_params} :param devlib_platform: Instance of :class:`devlib.platform.Platform` to use to build", "is None else use_scp, ) # Configure password or SSH", "base installation path logger.info(f'Creating result directory: {res_dir}') # It will", "a \"host_output_dir\" # argument computed at runtime. # Note: lisa.target.Gem5SimulationPlatformWrapper", "time_str elif append_time: name = f\"{name}-{time_str}\" # If we were", "exception is raised, so it's a best-effort approach. \"\"\" logger", "def wrapper(*f_args, **f_kwargs): return self.execute_python(f, f_args, f_kwargs, **kwargs) return wrapper", "lets you freeze the userspace. .. note:: A number of", "not needed here: .. code-block:: python TargetConf({{ 'name': 'myboard', 'host':", "= { param: value for param, value in vars(args).items() if", "number of situations prevent from freezing anything. When that happens,", "to Linux kind.\") parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login password. Only applies to", "found: {e}') plat_info = None return cls.from_conf(conf=conf, plat_info=plat_info) @classmethod #", "the configuration file: .. code-block:: YAML target-conf: # \"!env:<type> ENV_VAR_NAME\"", "port: !env:int PORT .. note:: Only load trusted YAML files", "'watchdogd', ] } \"\"\" Dictionary mapping OS name to list", "is some hope that the # next time it will", "they would shadow any global name # anyway, and it's", "returned/raised in the host caller. 
:Variable keyword arguments: Forwarded to", "pylint: disable=raise-missing-from try: excep = read_output(excep) # If we can't", "self._devlib_loadable_modules return sorted(attrs) @classmethod def from_conf(cls, conf: TargetConf, res_dir: ArtifactPath", "build the devlib Platform object devlib_platform = devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] =", "def remote_func(self, **kwargs): \"\"\" Decorates a given function to execute", "the command line gem5_args = ' '.join(shlex.quote(a) for a in", "val = foo(1, y=2) :Variable keyword arguments: Forwarded to :meth:`execute`", "Android kind.\") device_group.add_argument(\"--host\", \"-n\", help=\"The hostname/IP of the target.\") parser.add_argument(\"--username\",", "Devlib Platform configuration ######################################################################## if not devlib_platform: devlib_platform = devlib.platform.Platform()", "import textwrap import functools import inspect import pickle import tempfile", "_init_target(self, kind, name, workdir, device, host, port, username, password, keyfile,", "if device: pass elif host: port = port or self.ADB_PORT_DEFAULT", "for name, mod in global_vars.items() if isinstance(mod, ModuleType) } def", "you do that. Messing with cgroups in a systemd #", "freeze everything except PID 1, we don't want to let", "'<PASSWORD>', }}) Or alternatively, from a YAML configuration file: Content", "pylint: disable=broad-except return False else: return True def __getattr__(self, attr):", "along the relevant credentials: $ {script} --kind linux --host 192.0.2.1", "f\"--disk-image {system['disk']}\" )) diod_path = which('diod') if diod_path is None:", "Copyright (C) 2018, ARM Limited and contributors. # # Licensed", "# Hack for Gem5 devlib Platform, that requires a \"host_output_dir\"", "# early modules try to connect to UART and do", "express or implied. 
# See the License for the specific", "default location under $LISA_HOME will be used.\") params = params", "necessary') cm = nullcontext else: controllers = [s.name for s", "except in compliance with the License. # You may obtain", "functools import inspect import pickle import tempfile from types import", "getattr(cls, 'name', None) # early modules try to connect to", "192.0.2.1, 'kind': 'linux', 'username': 'foo', 'password': '<PASSWORD>', }}) Or alternatively,", "choices=('warning', 'info', 'debug'), help=\"Verbosity level of the logs.\") parser.add_argument(\"--res-dir\", \"-o\",", "'root' logger.debug(f'Setting up {kind} target...') # If the target is", "so we are sure to be correct for tool in", "Context manager that lets you freeze the userspace. .. note::", "KeyDesc('name', 'Board name, free-form value only used to embelish logs',", "a target using the provided configuration in order to run", "f: f.write(out) ''').format( f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n' + ' ' *", "in the same module * referred to by name, assuming", "variable. .. note:: Only load trusted YAML files as it", "in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield finally: logger.info('Re-enabling idle states for all", "a YAML file is allowed and will work: * file", "on your host machine, check your installation or your PATH", "in target_conf: parser.error('--conf with target configuration or any of the", "a given kind of board # (i.e. a Juno board", "except EOFError: # pylint: disable=raise-missing-from try: excep = read_output(excep) #", "structure which would normally be invalid, but is handled as", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "need this task but on Google Pixel it apparently #", "Connect to a target using the provided configuration in order", "the function on the target transparently val = foo(1, y=2)", "KeyDesc('device', 'ADB device. 
Takes precedence over \"host\"', [str, None]), KeyDesc('keyfile',", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "kind.\") parser.add_argument(\"--password\", <PASSWORD>\", help=\"Login password. Only applies to Linux kind.\")", "@classmethod def from_default_conf(cls): \"\"\" Create a :class:`Target` from the YAML", "in the name, there is some hope that the #", "val in var_dct.items() } funcs[name] = (f, cls._get_code(f)[1]) for _name,", "= {k.replace('-', '_') for k in params.keys()} # Options that", "pickle import sys def wrapper(): {modules} {code} return {f}({kwargs}) try:", "name: name = time_str elif append_time: name = f\"{name}-{time_str}\" #", "CONDITIONS OF ANY KIND, either express or implied. # See", "no directory is specified, a default location under $LISA_HOME will", "build the command line gem5_args = ' '.join(shlex.quote(a) for a", "modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=', '.join( f'{name}={name}' for name in kwargs.keys()", "execute_python(self, f, args, kwargs, **execute_kwargs): \"\"\" Executes the given Python", "= os.path.join(d, 'out') self.pull(path, name) with open(name, 'rb') as f:", "-> 'Target': \"\"\" Same as :meth:`from_custom_cli` without the custom parameters", "not in the loadable list, it # has been excluded", "): # pylint: disable=dangerous-default-value super().__init__() logger = self.get_logger() self.name =", "code=textwrap.dedent(code_str).replace('\\n', '\\n' + ' ' * 4), modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)),", "{ 'linux': [ 'init', # We want to freeze everything", "name, mod in closure_vars.items() if isinstance(mod, ModuleType) ) funcs =", "be created automatically virtio_args = [ f'--which-diod={diod_path}', '--workload-automation-vio={}', ] simulator_args.extend(virtio_args)", "plat_info = PlatformInfo.from_yaml_map(path) except Exception as e: # pylint: disable=broad-except", 
"AttributeError: # Load the module on demand if attr in", "need to provide the information needed to connect to the", "# SPDX-License-Identifier: Apache-2.0 # # Copyright (C) 2018, ARM Limited", "target') cm = nullcontext else: exclude = copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do", "# Using textual name of the Platform allows this YAML", "key is not needed here: .. code-block:: python TargetConf({{ 'name':", "transfer method. Currently avaliable options # are 'sftp' and 'scp',", "if module not in _DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\" is not a", "accessed. \"\"\" def get(): return getattr(self.target, attr) try: return get()", "Name of the results directory :type name: str :param append_time:", "in that structure which would normally be invalid, but is", "plus the ones from the underlying target, and the devlib", "device, host, port, username, password, keyfile, strict_host_check, use_scp, devlib_platform, wait_boot,", "cpuidle.disable_all(cpu) yield finally: logger.info('Re-enabling idle states for all domains') for", "or --device must be specified') if args.kind == 'linux': for", "name: self.plat_info.add_src('target-conf', dict(name=name)) # Determine file transfer method. Currently avaliable", "y=2) :Variable keyword arguments: Forwarded to :meth:`execute` that will spawn", "# If the username was explicitly set to \"root\", root", "the target and that this module is in scope. If", "run a test. EXAMPLES --conf can point to a YAML", "some hope that the # next time it will succeed", "'linux': [ 'init', # We want to freeze everything except", "the parser. It is in the form of ``{param_name: {dict", "bail out if it fails to load. 
\"\"\" if module", "to build the :class:`devlib.target.Target` :type devlib_platform: devlib.platform.Platform :param plat_info: Platform", "any other third-party code LevelKeyDesc('platform', 'devlib.platform.Platform subclass specification', ( KeyDesc('class',", "system, simulator, **kwargs): simulator_args = copy.copy(simulator.get('args', [])) system_platform = system['platform']", "= read_output(excep) # If we can't even read the exception,", "to install :type tools: list(str) \"\"\" def bin_path(tool): binary =", "logger.warning('Could not freeze userspace: \"cgroups\" devlib module is necessary') cm", "= False out = pickle.dumps(out) out_tempfile = {out_tempfiles}[1] if out_is_excep", "import argparse import textwrap import functools import inspect import pickle", "* 4), modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=', '.join( f'{name}={name}' for name", "the target is Android, we need just (eventually) the device", "f_name, f in global_vars.items(): if can_include(f): add_func(f, f_name) code_str +=", "all non-overriden attributes/method accesses to the underlying :class:`devlib.target.Target`. .. 
note::", "is not None and k not in not_target_conf_opt }) #", "disable=raise-missing-from raise AttributeError(f'Devlib target module {attr} was explicitly excluded, not", "workdir, device, host, port, username, password, keyfile, strict_host_check, use_scp, devlib_platform,", "{script} --kind linux --host 192.0.2.1 --username root --password <PASSWORD> In", "self.get_logger().info(f'Loading target devlib module {attr}') self.target.install_module(attr) return get() # If", "@contextlib.contextmanager def disable_idle_states(self): \"\"\" Context manager that lets you disable", "if not line.strip().startswith('@') ] code_str = textwrap.dedent(''.join(lines)) name = f.__name__", "the file.\" ) parser.add_argument(\"--kind\", \"-k\", choices=[\"android\", \"linux\", \"host\"], help=\"The kind", "= { 'platform_info', 'log_level', 'res_dir', 'conf', } not_target_conf_opt.update(custom_params) args =", "and type: {generated_help} An instance can be created by calling", "logger.info(f\"Freezing all tasks except: {','.join(exclude)}\") try: yield self.cgroups.freeze(exclude) finally: logger.info('Un-freezing", "--host 192.0.2.1 --username root --password <PASSWORD> In both cases, --conf", "kind, name='<noname>', tools=[], res_dir=None, plat_info=None, lazy_platinfo=False, workdir=None, device=None, host=None, port=None,", "logger.debug(f'Target info: {dict(abi=target.abi, cpuinfo=target.cpuinfo, workdir=target.working_directory)}') target.setup() logger.info(f\"Connected to target {(name", "self.target.install_module(attr) return get() # If it was not in the", "{ 'platform_info', 'log_level', 'res_dir', 'conf', } not_target_conf_opt.update(custom_params) args = parser.parse_args(argv)", "self._installed_tools.add(tool) @contextlib.contextmanager def freeze_userspace(self): \"\"\" Context manager that lets you", "decorators, as they are either undefined or just were used", "to # feed the function to us lines = [", "If the file is empty, we probably got an exception", 
"except Exception as e: # pylint: disable=broad-except err = e", "and not applied. \"\"\" sig = inspect.signature(f) kwargs = sig.bind(*args,", "the devlib modules that could be loaded on-demand. \"\"\" attrs", "the ones from the underlying target, and the devlib modules", "userspace: target is not rooted') cm = nullcontext elif not", "Create a Target from command line arguments. :param argv: The", "to implement long-term scripts, it's more designed for quick scripting.", "else: # Make a copy of the PlatformInfo so we", "settings', ( KeyDesc('name', 'Board name, free-form value only used to", "super().__init__() logger = self.get_logger() self.name = name res_dir = res_dir", "shadow any global name # anyway, and it's restricted to", "tools to install :type tools: list(str) \"\"\" def bin_path(tool): binary", "for line in lines if not line.strip().startswith('@') ] code_str =", ":type params: dict(str, dict) :return: A tuple ``(args, target)`` ..", "mod) for name, mod in closure_vars.items() if isinstance(mod, ModuleType) )", "formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\" Connect to a target using the provided", "so that it's usable by exekall def from_cli(cls, argv=None, params=None)", "elif append_time: name = f\"{name}-{time_str}\" # If we were given", "def _init_target(self, kind, name, workdir, device, host, port, username, password,", "workdir or '/data/local/tmp/devlib-target' if device: pass elif host: port =", "The return value or any exception is pickled back and", "module so that: # 1. 
there is no name clash", "will be required if isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid())) @contextlib.contextmanager def cm():", "__init__(self, kind, name='<noname>', tools=[], res_dir=None, plat_info=None, lazy_platinfo=False, workdir=None, device=None, host=None,", "for s in self.cgroups.list_subsystems()] if 'freezer' not in controllers: logger.warning('Could", "systemd # system is pretty bad idea. if self._uses_systemd: logger.warning('Will", "functions since their definition depends on # external callable we", "as they are either undefined or just were used to", "= PlatformInfo.from_yaml_map(path) except Exception as e: # pylint: disable=broad-except cls.get_logger().warning(f'No", "on the target') cm = nullcontext else: exclude = copy.copy(self.CRITICAL_TASKS[self.target.os])", "# Make sure all submodules of devlib.module are imported so", "# Inject the parameters inside the wrapper's globals so that", ":type argv: list(str) :param params: Dictionary of custom parameters to", "funcs = {} for f_name, f in global_vars.items(): if can_include(f):", "note:: This method should not be relied upon to implement", "that are in # direct control f.__module__ == module )", "[Mapping]), )), KeyDesc('excluded-modules', 'List of devlib modules to *not* load',", "Other options will override what is specified in the file.\"", "but no exception is raised, so it's a best-effort approach.", "function to us lines = [ line for line in", "we # will freeze to death and a machine hard", ":param devlib_platform: Instance of :class:`devlib.platform.Platform` to use to build the", "supported, but mutating them will not be reflected in the", "benefits of user code. 
:type plat_info: lisa.platforms.platinfo.PlatformInfo You need to", "wrapper using eval() global_vars = { **global_vars, **kwargs, } #", "of devlib.module are imported so the classes # are all", "@contextlib.contextmanager def cm(): try: for cpu in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu) yield", "that happens, a warning is logged but no exception is", "we list them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES = { cls.name for cls", "arguments try: conf = TargetConf.from_yaml_map(args.conf) except (KeyError, ValueError): pass else:", "remote functions') closure_vars = { name: val for var_dct in", "= nullcontext else: @contextlib.contextmanager def cm(): try: for cpu in", "port', [int, None]), KeyDesc('device', 'ADB device. Takes precedence over \"host\"',", "\"\"\" Create a :class:`Target` from the YAML configuration file pointed", "contextlib.suppress(KeyError, ValueError): platform_info = PlatformInfo.from_yaml_map(args.conf) # Load the TargetConf from", "wait_boot, wait_boot_timeout, ): \"\"\" Initialize the Target \"\"\" logger =", "linux targets)', [TypedList[str]]), )) )) DEFAULT_SRC = { 'devlib': {", "'devlib.platform.Platform' } } } class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): \"\"\"", "can_include(f): add_func(f, f_name) code_str += '\\n' + '\\n'.join(map(itemgetter(1), funcs.values())) non_pickled", "of devlib modules to *not* load', [TypedList[str]]), KeyDesc('file-xfer', 'File transfer", "usable by exekall def from_cli(cls, argv=None, params=None) -> 'Target': \"\"\"", "# next time it will succeed if append_time: logger.info('Directory already", "used, according to: # https://www.freedesktop.org/software/systemd/man/sd_booted.html self.execute('test -d /run/systemd/system/', check_exit_code=True) except", "with target configuration or any of the connection options is", "'enable'], 'wait_boot_timeout': ['wait-boot', 'timeout'], } def __init__(self, kind, name='<noname>', tools=[],", "[int]), 
)), LevelKeyDesc('devlib', 'devlib configuration', ( # Using textual name", "def __dir__(self): \"\"\" List our attributes plus the ones from", "ENV_VAR_NAME\" can be used to reference an # environment variable.", "sudo # password. conn_settings.update( unrooted=password is None, password=password, ) else:", "list of arguments. ``sys.argv[1:]`` will be used if this is", "} } } class Target(Loggable, HideExekallID, ExekallTaggable, Configurable): \"\"\" Wrap", "# anyway, and it's restricted to the wrapper using eval()", "ValueError): pass else: target_conf.add_src(args.conf, conf) target_conf.add_src('command-line', { k: v for", "KeyDesc('workdir', 'Remote target workdir', [str]), KeyDesc('tools', 'List of tools to", "target is using systemd, which already uses cgroups') devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules", "except (KeyError, ValueError): pass else: target_conf.add_src(args.conf, conf) target_conf.add_src('command-line', { k:", "an ArtifactPath with an existing root, we # preserve that", "# Treat the modules separately as they cannot be pickled", ".. note:: This will attempt to load the module if", "and devlib_file_xfer not in ('scp', 'sftp'): raise ValueError(f'Invalid file transfer", "the original one to benefit from mapping configuration if issubclass(devlib_platform_cls,", "can # access them. It's harmless as they would shadow", "argv=None, params=None) -> 'Target': \"\"\" Same as :meth:`from_custom_cli` without the", "os.path.isfile(binary): binary = os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool) return binary tools", "so it becomes # available for later inspection. That board", "help=\"The ADB ID of the target. Superseeds --host. Only applies", "tags, so TargetConf files can # be parsed and produced", "internal members. \"\"\" ADB_PORT_DEFAULT = 5555 SSH_PORT_DEFAULT = 22 CRITICAL_TASKS", "lost in some automated environment. 
:param name: Name of the", "keys defined below are allowed, with the given meaning and", "to check. :type module: str .. note:: This will attempt", "get better error messages if 'kind' not in target_conf: parser.error('--conf", "Context manager that lets you disable all idle states \"\"\"", "val for name, val in global_vars.items() if name not in", "| self._devlib_loadable_modules return sorted(attrs) @classmethod def from_conf(cls, conf: TargetConf, res_dir:", "'debug'), help=\"Verbosity level of the logs.\") parser.add_argument(\"--res-dir\", \"-o\", help=\"Result directory", "= {} for f_name, f in global_vars.items(): if can_include(f): add_func(f,", "in conn_settings.items() if key != 'password' ) logger.debug(f'{kind} {name} target", "TargetConf INIT_KWARGS_KEY_MAP = { 'devlib_excluded_modules': ['devlib', 'excluded-modules'], 'devlib_file_xfer': ['devlib', 'file-xfer'],", "to get better error messages if 'kind' not in target_conf:", "for f_name, f in global_vars.items(): if can_include(f): add_func(f, f_name) code_str", "on top of it. {configurable_params} :param devlib_platform: Instance of :class:`devlib.platform.Platform`", "wrapper return wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system, simulator, **kwargs):", "case:: target-conf: target-conf: name: myboard \"\"\" STRUCTURE = TopLevelKeyDesc('target-conf', 'target", "k in params.keys()} # Options that are not a key", "ones from the underlying target, and the devlib modules that", "to be correct for tool in map(bin_path, tools): self.target.install(tool) self._installed_tools.add(tool)", "ModuleType) ) funcs = {} for f_name, f in global_vars.items():", "will only be # computed when actually needed. 
rta_calib_res_dir =", "information to speed up the connection', [bool]), LevelKeyDesc('wait-boot', 'Wait for", "benefit from mapping configuration if issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir) #", "quick scripting. \"\"\" parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\" Connect", "device: pass elif host: port = port or self.ADB_PORT_DEFAULT device", "file.\" ) parser.add_argument(\"--kind\", \"-k\", choices=[\"android\", \"linux\", \"host\"], help=\"The kind of", "the # next time it will succeed if append_time: logger.info('Directory", "# are all created before we list them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES", "handled as a special case:: target-conf: target-conf: name: myboard \"\"\"", "# Get gem5 binary arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', [])) simulator_args.extend((", "a in simulator_args) super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs ) # vim", "open(out_tempfile, 'wb') as f: f.write(out) ''').format( f=name, code=textwrap.dedent(code_str).replace('\\n', '\\n' +", "interpreter bypasses __getattr__ when looking them up. .. note:: Devlib", "are referred to will be: * bundled in the script", ":type plat_info: lisa.platforms.platinfo.PlatformInfo You need to provide the information needed", "created by calling :class:`~TargetConf` with a dictionary. The top-level `target-conf`", "files as it can lead to abritrary code execution. \"\"\"", "installed on the target and that this module is in", ")), KeyDesc('excluded-modules', 'List of devlib modules to *not* load', [TypedList[str]]),", "a copy of the PlatformInfo so we don't modify the", "as username means adb root will be attempted, but failure", "will be lost in some automated environment. 
:param name: Name", "and is returned/raised in the host caller. :Variable keyword arguments:", "'ssh', 'rsyslogd', 'jbd2' ], 'android': [ 'sh', 'adbd', 'usb', 'transport',", "'platform': { 'class': 'devlib.platform.Platform' } } } class Target(Loggable, HideExekallID,", "configuration file with all the necessary connection information: $ {script}", "in params.items(): parser.add_argument(f'--{param}', **settings) custom_params = {k.replace('-', '_') for k", "# preserve that root so it can be relocated as", "excep = read_output(excep) # If we can't even read the", "that it's usable by exekall def from_cli(cls, argv=None, params=None) ->", "lazy_platinfo=False, workdir=None, device=None, host=None, port=None, username=None, password=<PASSWORD>, keyfile=None, strict_host_check=None, devlib_platform=None,", "module, kwargs, global_vars, out_tempfiles): # Inject the parameters inside the", "current datetime. :type append_time: bool :param symlink: Create a symlink", "external entity, which means the results will be lost in", "variable. name: !env:str BOARD_NAME port: !env:int PORT .. note:: Only", "YAML configuration file pointed by ``LISA_CONF`` environment variable. .. note::", "in params.keys()} # Options that are not a key in", "KeyDesc('tools', 'List of tools to install on the target', [TypedList[str]]),", "number of problems that could appear after another module #", "bar.yml:: target-conf: !include foo.yml This will result in that structure", "'adbd', 'usb', 'transport', # We don't actually need this task", "custom parameters to add to the parser. It is in", "FunctionType) and # Only allow inlining of functions defined in", "used to # feed the function to us lines =", "not applied. 
\"\"\" sig = inspect.signature(f) kwargs = sig.bind(*args, **kwargs).arguments", "code-block:: python TargetConf({{ 'name': 'myboard', 'host': 192.0.2.1, 'kind': 'linux', 'username':", "to benefit from mapping configuration if issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir', res_dir)", "if can_include(f): add_func(f, f_name) code_str += '\\n' + '\\n'.join(map(itemgetter(1), funcs.values()))", "kwargs = sig.bind(*args, **kwargs).arguments closure_vars = inspect.getclosurevars(f) name, code_str =", "variable') # Setup virtio # Brackets are there to let", "restricted to the wrapper using eval() global_vars = { **global_vars,", "'Board name, free-form value only used to embelish logs', [str]),", "memoized from lisa.assets import ASSETS_PATH from lisa.conf import SimpleMultiSrcConf, KeyDesc,", "FileExistsError: # If the time is used in the name,", "module=f.__module__, kwargs=kwargs, global_vars={ **closure_vars.globals, **closure_vars.nonlocals, }, out_tempfiles=out_tempfiles ) cmd =", "exception is pickled back and is returned/raised in the host", "freeze the userspace. .. note:: A number of situations prevent", "remotely using :meth:`execute_python`:: target = Target(...) @target.remote_func(timeout=42) def foo(x, y):", "autodect the platform information to speed up the connection', [bool]),", "defined below are allowed, with the given meaning and type:", "return True def __getattr__(self, attr): \"\"\" Forward all non-overriden attributes/method", "following special YAML tags can be used in the configuration", "module is in scope. If that is not the case,", "{k.replace('-', '_') for k in params.keys()} # Options that are", "can't even read the exception, raise the initial one #", "e: # pylint: disable=broad-except cls.get_logger().warning(f'No platform information could be found:", "given an ArtifactPath with an existing root, we # preserve", "case, a :exc:`NameError` will be raised. .. 
attention:: Decorators are", "like __str__, since the interpreter bypasses __getattr__ when looking them", "ArtifactPath, nullcontext, ExekallTaggable, memoized from lisa.assets import ASSETS_PATH from lisa.conf", "name to list of task names that we can't afford", "be appended to the given ``name``. If ``name`` is None,", "as x: yield x @contextlib.contextmanager def disable_idle_states(self): \"\"\" Context manager", "or {} for param, settings in params.items(): parser.add_argument(f'--{param}', **settings) custom_params", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "err if err is not None else ValueError('No exception was", "if isinstance(var_dct, Mapping) for name, val in var_dct.items() } funcs[name]", "kwargs['devlib_platform'] = devlib_platform cls.check_init_param(**kwargs) return cls(**kwargs) @classmethod def from_default_conf(cls): \"\"\"", "failure will # not prevent from connecting to the target.", "read_output(path): with tempfile.TemporaryDirectory() as d: name = os.path.join(d, 'out') self.pull(path,", "# and no specific value should be expected for a", "= password elif kind == 'host': devlib_target_cls = devlib.LocalLinuxTarget #", "can be relocated as the caller wants it res_dir =", "f.__module__ == module ) def add_func(f, name): # Disallow decorated", "must be specified') custom_args = { param: value for param,", "it. 
{configurable_params} :param devlib_platform: Instance of :class:`devlib.platform.Platform` to use to", "inspect.getclosurevars(f) if isinstance(var_dct, Mapping) for name, val in var_dct.items() }", "inspect.getsourcelines(f) # Remove decorators, as they are either undefined or", "target_conf.add_src('command-line', { k: v for k, v in vars(args).items() if", "applies to Android kind.\") device_group.add_argument(\"--host\", \"-n\", help=\"The hostname/IP of the", "'Target': \"\"\" Same as :meth:`from_custom_cli` without the custom parameters capabilities.", "else: exclude = copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do not freeze the process", "YAML tags, so TargetConf files can # be parsed and", "platform_info = PlatformInfo.from_yaml_map(args.conf) # Load the TargetConf from the file,", "states for all domains') try: cpuidle = self.cpuidle except AttributeError:", "let # sysmted-journald or systemd-timesyncd running. 'systemd[^-]', 'dbus', 'sh', 'ssh',", "else ValueError('No exception was raised or value returned by the", "code execution. \"\"\" conf = TargetConf.from_yaml_map(path) try: plat_info = PlatformInfo.from_yaml_map(path)", "pickled modules = { name: mod for name, mod in", "itemgetter import devlib from devlib.exception import TargetStableError from devlib.utils.misc import", "# sysmted-journald or systemd-timesyncd running. 'systemd[^-]', 'dbus', 'sh', 'ssh', 'rsyslogd',", "{e}') plat_info = None return cls.from_conf(conf=conf, plat_info=plat_info) @classmethod # Keep", "self._res_dir relative = '' return self._get_res_dir( root=root, relative=relative, name=name, append_time=append_time,", "folder already exists. In that case, # append_time should be", "else: @contextlib.contextmanager def cm(): try: for cpu in range(self.plat_info['cpus-count']): cpuidle.disable_all(cpu)", "None]), KeyDesc('username', 'SSH username. 
On ADB connections, \"root\" username will", "raise def __dir__(self): \"\"\" List our attributes plus the ones", "+ ' ' * 4), modules=modules, out_tempfiles=repr(out_tempfiles), globals=repr(pickle.dumps(global_vars)), kwargs=', '.join(", "KeyDesc('username', 'SSH username. On ADB connections, \"root\" username will root", "Target. If no directory is specified, a default location under", "when using :meth:`freeze_userspace`. \"\"\" CONF_CLASS = TargetConf INIT_KWARGS_KEY_MAP = {", "cls.from_conf(conf=conf, plat_info=plat_info) @classmethod # Keep the signature without *args and", "# There may be a race condition with another tool", "the given ``name``. If ``name`` is None, the directory name", "check to get better error messages if 'kind' not in", "# argument computed at runtime. # Note: lisa.target.Gem5SimulationPlatformWrapper should be", "machine hard reboot will be required if isinstance(self.target, devlib.LocalLinuxTarget): exclude.append(str(os.getpid()))", "original # one we were passed when adding the target", "system_platform = system['platform'] # Get gem5 binary arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description'])", "if tools: logger.info(f'Tools to install: {tools}') self.install_tools(tools) # Autodetect information", "_DEVLIB_AVAILABLE_MODULES: raise ValueError(f'\"{module}\" is not a devlib module') try: getattr(self,", "the conf file with contextlib.suppress(KeyError, ValueError): platform_info = PlatformInfo.from_yaml_map(args.conf) #", "modules = { name: mod for name, mod in global_vars.items()", "finally: logger.info('Un-freezing userspace tasks') self.cgroups.freeze(thaw=True) with cm() as x: yield", "'linux': devlib_target_cls = devlib.LinuxTarget conn_settings.update( username=resolved_username, port=port or self.SSH_PORT_DEFAULT, host=host,", "'Name of the class to use', [str]), KeyDesc('args', 'Keyword arguments", "either undefined or just were used to # feed the", "Compute base 
installation path logger.info(f'Creating result directory: {res_dir}') # It", "'rb') as f: return pickle.loads(f.read()) def parse_output(paths, err): val, excep", ":exc:`NameError` will be raised. .. attention:: Decorators are ignored and", "with cm() as x: yield x def get_tags(self): return {'board':", "it can lead to abritrary code execution. \"\"\" conf =", "None, the directory name will be the current datetime. :type", "let you do that. Messing with cgroups in a systemd", "If it was not in the loadable list, it #", "are not a key in TargetConf must be listed here", "f in global_vars.items(): if can_include(f): add_func(f, f_name) code_str += '\\n'", "when looking them up. .. note:: Devlib modules are loaded", "not_target_conf_opt.update(custom_params) args = parser.parse_args(argv) setup_logging(level=args.log_level.upper()) target_conf = TargetConf() platform_info =", "the board will crash if this is frozen # for", "with cgroups in a systemd # system is pretty bad", "harmless as they would shadow any global name # anyway,", "is ``None``. :type argv: list(str) :param params: Dictionary of custom", "will freeze to death and a machine hard reboot will", "2.0 (the \"License\"); you may # not use this file", "will be used.\") params = params or {} for param,", "{system['dtb']}\", f\"--disk-image {system['disk']}\" )) diod_path = which('diod') if diod_path is", "= PlatformInfo.from_yaml_map(args.conf) # Load the TargetConf from the file, and", "( KeyDesc('class', 'Name of the class to use', [str]), KeyDesc('args',", "'excluded-modules'], 'devlib_file_xfer': ['devlib', 'file-xfer'], 'wait_boot': ['wait-boot', 'enable'], 'wait_boot_timeout': ['wait-boot', 'timeout'],", "on the target and that this module is in scope.", "try to freeze it. 
'thermal-engine', # Similar issue with HiKey960,", "will root adb upon target connection', [str, None]), PasswordKeyDesc('password', 'SSH", "nullcontext, ExekallTaggable, memoized from lisa.assets import ASSETS_PATH from lisa.conf import", "files as it can lead to abritrary code execution. \"\"\".format(", "created before we list them import_all_submodules(devlib.module) _DEVLIB_AVAILABLE_MODULES = { cls.name", "var_dct.items() } funcs[name] = (f, cls._get_code(f)[1]) for _name, _f in", "target, after the Target is # initialized. Expensive computations are", "line.strip().startswith('@') ] code_str = textwrap.dedent(''.join(lines)) name = f.__name__ return (name,", "Messing with cgroups in a systemd # system is pretty", "tuple ``(args, target)`` .. note:: This method should not be", "res_dir = ArtifactPath(root, os.path.join(relative, name)) # Compute base installation path", "= set(devlib_excluded_modules) # Sorry, can't let you do that. Messing", "# is updated or so. We only inline local things", "by applicable law or agreed to in writing, software #", "copy of the PlatformInfo so we don't modify the original", "nullcontext else: exclude = copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do not freeze the", "[ line for line in lines if not line.strip().startswith('@') ]", "Takes precedence over \"host\"', [str, None]), KeyDesc('keyfile', 'SSH private key", "plat_info = copy.copy(plat_info) logger.info(f'User-defined platform information:\\n{plat_info}') self.plat_info = plat_info #", "precedence over \"host\"', [str, None]), KeyDesc('keyfile', 'SSH private key file',", "@contextlib.contextmanager def freeze_userspace(self): \"\"\" Context manager that lets you freeze", "the link with contextlib.suppress(FileExistsError): os.symlink(res_dir, res_lnk) return res_dir def install_tools(self,", "a dictionary. 
The top-level `target-conf` key is not needed here:", "= argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent( \"\"\" Connect to a target using", "symlink=symlink, ) def _get_res_dir(self, root, relative, name, append_time, symlink): logger", "to execute remotely using :meth:`execute_python`:: target = Target(...) @target.remote_func(timeout=42) def", "} def can_include(f): return ( isinstance(f, FunctionType) and # Only", "a Target from command line arguments. :param argv: The list", "a special case:: target-conf: target-conf: name: myboard \"\"\" STRUCTURE =", "Android, we need just (eventually) the device if kind ==", "functions defined in the same module so that: # 1.", "on # external callable we cannot control if hasattr(f, '__wrapped__'):", "provided positional and keyword arguments. The return value or any", ":class:`Target` \"\"\" _, target = cls.from_custom_cli(argv=argv, params=params) return target @classmethod", "that will spawn the Python interpreter on the target ..", "functions that are referred to will be: * bundled in", "disable all idle states \"\"\" logger = self.get_logger() logger.info('Disabling idle", "loadable list, it # has been excluded explicitly elif attr", "If the time is used in the name, there is", "= 22 CRITICAL_TASKS = { 'linux': [ 'init', # We", "is not a devlib module') try: getattr(self, module) except Exception:", "'Equivalent to StrictHostKeyChecking option of OpenSSH', [bool, None]), KeyDesc('workdir', 'Remote", "interpreter on the target \"\"\" def wrapper_param(f): @functools.wraps(f) def wrapper(*f_args,", "it fails to load. 
\"\"\" if module not in _DEVLIB_AVAILABLE_MODULES:", "are not interested in and getattr(cls, 'stage') != 'early' )", "if _f is not f and can_include(_f): add_func(_f, _name) modules.update(", "loaded') cm = nullcontext else: @contextlib.contextmanager def cm(): try: for", "``name`` is None, the directory name will be the current", "the boot check', [bool]), KeyDesc('timeout', 'Timeout of the boot check',", "name, code_str = self._get_code(f) def mktemp(): return self.execute( f'mktemp -p", "'username', 'password']: if required not in target_conf: parser.error(f'--{required} must be", "(C) 2018, ARM Limited and contributors. # # Licensed under", "if v is not None and k not in not_target_conf_opt", "and 'scp', defaults to sftp. if devlib_file_xfer and devlib_file_xfer not", "a key in TargetConf must be listed here not_target_conf_opt =", "return pickle.loads(f.read()) def parse_output(paths, err): val, excep = paths try:", "# Workaround for ARM-software/devlib#225 workdir = workdir or '/data/local/tmp/devlib-target' if", "file is allowed and will work: * file foo.yml:: target-conf:", "of user code. :type plat_info: lisa.platforms.platinfo.PlatformInfo You need to provide", "messing up with ``devlib`` internal members. \"\"\" ADB_PORT_DEFAULT = 5555", "the test config 'tools' field :param tools: The list of", "list of task names that we can't afford to freeze", "target, and the devlib modules that could be loaded on-demand.", "tools to deploy if tools: logger.info(f'Tools to install: {tools}') self.install_tools(tools)", "parameters capabilities. :return: A connected :class:`Target` \"\"\" _, target =", "configuration so it becomes # available for later inspection. That", "automated environment. :param name: Name of the results directory :type", "it. 'thermal-engine', # Similar issue with HiKey960, the board will", "# are 'sftp' and 'scp', defaults to sftp. 
if devlib_file_xfer", "def wrapper(): {modules} {code} return {f}({kwargs}) try: out = eval(wrapper.__code__,", "{})) # Hack for Gem5 devlib Platform, that requires a", "root=os.path.join(LISA_HOME, RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True ) self._res_dir = res_dir", "= res_dir if res_dir else self._get_res_dir( root=os.path.join(LISA_HOME, RESULT_DIR), relative='', name=f'{self.__class__.__qualname__}-{self.name}',", "of the created Target. If no directory is specified, a", "['python3', '-c', snippet] cmd = ' '.join(map(shlex.quote, cmd)) try: self.execute(cmd,", "fails to load. \"\"\" if module not in _DEVLIB_AVAILABLE_MODULES: raise", "snippet = self._make_remote_snippet( name=name, code_str=code_str, module=f.__module__, kwargs=kwargs, global_vars={ **closure_vars.globals, **closure_vars.nonlocals,", "out if it fails to load. \"\"\" if module not", "connection information: $ {script} --conf my_target.yml Alternatively, --kind must be", "######################################################################## target = devlib_target_cls( platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings, working_directory=workdir, connect=False, )", "_f in closure_vars.items(): if _f is not f and can_include(_f):", "== 'root') elif kind == 'linux': devlib_target_cls = devlib.LinuxTarget conn_settings.update(", "``results_latest`` to the newly created results directory :type symlink: bool", "= conf['devlib']['platform'] devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class']) devlib_platform_kwargs = copy.copy(devlib_platform_conf.get('args', {})) #", "that this module is in scope. If that is not", "(default) or \"scp\". 
(Only valid for linux targets)', [TypedList[str]]), ))", "the parameters inside the wrapper's globals so that it can", "custom_args = { param: value for param, value in vars(args).items()", "in vars(args).items() if param in custom_params } custom_args = argparse.Namespace(**custom_args)", "the :class:`devlib.target.Target` :type devlib_platform: devlib.platform.Platform :param plat_info: Platform information attached", "one to benefit from mapping configuration if issubclass(devlib_platform_cls, Gem5SimulationPlatform): devlib_platform_kwargs.setdefault('host_output_dir',", "devlib Target object ######################################################################## target = devlib_target_cls( platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings,", "}}) Or alternatively, from a YAML configuration file: Content of", "self.get_logger() if not self.is_rooted: logger.warning('Could not freeze userspace: target is", "Keep the signature without *args and **kwargs so that it's", "applicable law or agreed to in writing, software # distributed", "system['platform'] # Get gem5 binary arguments simulator_args.append('--listener-mode=on') simulator_args.append(system_platform['description']) simulator_args.extend(system_platform.get('args', []))", "approach. \"\"\" logger = self.get_logger() if not self.is_rooted: logger.warning('Could not", "logger.info('Re-enabling idle states for all domains') for cpu in range(self.plat_info['cpus-count']):", "parameters inside the wrapper's globals so that it can #", "literalinclude:: ../target_conf.yml :language: YAML :: TargetConf.from_yaml_map('target_conf.yml') The following special YAML", "raise ValueError(f'Unsupported platform type {kind}') settings = '\\n '.join( f'", "can lead to abritrary code execution. .. note:: That structure", "function ``f`` with the provided positional and keyword arguments. The", "ArgumentParser.add_argument() options}}``. 
:type params: dict(str, dict) :return: A tuple ``(args,", "add_func(_f, _name) modules.update( (name, mod) for name, mod in closure_vars.items()", "subclass specification', ( KeyDesc('class', 'Name of the class to use',", "_get_code(f): lines, _ = inspect.getsourcelines(f) # Remove decorators, as they", "sys def wrapper(): {modules} {code} return {f}({kwargs}) try: out =", "the target', [TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect the platform information to", "and will work: * file foo.yml:: target-conf: name: myboard *", "# Tentatively load a PlatformInfo from the conf file with", "board name from the target configuration so it becomes #", "explicitly set to \"root\", root the target as # early", "a :class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo` instances. .. note:: Only load trusted", "as x: yield x def get_tags(self): return {'board': self.name} @classmethod", "the target .. note:: Closure variables are supported, but mutating", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "import sys import argparse import textwrap import functools import inspect", "transparently val = foo(1, y=2) :Variable keyword arguments: Forwarded to", "assuming it comes from a module that is installed on", "'scp', defaults to sftp. if devlib_file_xfer and devlib_file_xfer not in", "specified') custom_args = { param: value for param, value in", "lisa.conf import SimpleMultiSrcConf, KeyDesc, LevelKeyDesc, TopLevelKeyDesc,Configurable from lisa.generic import TypedList", "def freeze_userspace(self): \"\"\" Context manager that lets you freeze the", "tools to install on the target', [TypedList[str]]), KeyDesc('lazy-platinfo', 'Lazily autodect", "_ = inspect.getsourcelines(f) # Remove decorators, as they are either", "modules are loaded on demand when accessed. 
\"\"\" def get():", "name: str :param append_time: If True, the current datetime will", "finish booting', ( KeyDesc('enable', 'Enable the boot check', [bool]), KeyDesc('timeout',", "import os import os.path import contextlib import shlex from collections.abc", "as it can lead to abritrary code execution. \"\"\" path", "'android': [ 'sh', 'adbd', 'usb', 'transport', # We don't actually", "\"root\", root the target as # early as possible conn_settings['adb_as_root']", "appended to the given ``name``. If ``name`` is None, the", "should not rely on that as the created folder will", "name = time_str elif append_time: name = f\"{name}-{time_str}\" # If", "not in target_conf) and ('device' not in target_conf): parser.error('--host or", "use any python-specific YAML tags, so TargetConf files can #", "a warning is logged but no exception is raised, so", "need just (eventually) the device if kind == 'android': devlib_target_cls", "devlib from devlib.exception import TargetStableError from devlib.utils.misc import which from", "disable=broad-except cls.get_logger().warning(f'No platform information could be found: {e}') plat_info =", "so the cgroup state gets stuck in FREEZING if we", "Inject the parameters inside the wrapper's globals so that it", "loaded on demand when accessed. 
\"\"\" def get(): return getattr(self.target,", "cgroup controller not available on the target') cm = nullcontext", "= nullcontext else: exclude = copy.copy(self.CRITICAL_TASKS[self.target.os]) # Do not freeze", "f' {key}: {val}' for key, val in conn_settings.items() if key", "# You may obtain a copy of the License at", "load cgroups devlib module: target is using systemd, which already", "_DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules # Initialize binary tools to deploy if", "of the configuration file devlib_platform_conf = conf['devlib']['platform'] devlib_platform_cls = resolve_dotted_name(devlib_platform_conf['class'])", "return self._get_res_dir( root=root, relative=relative, name=name, append_time=append_time, symlink=symlink, ) def _get_res_dir(self,", "name=name, append_time=append_time, symlink=symlink, ) def _get_res_dir(self, root, relative, name, append_time,", "environment variable. .. note:: Only load trusted YAML files as", "if args.kind == 'linux': for required in ['host', 'username', 'password']:", "hostname/IP of the target.\") parser.add_argument(\"--username\", \"-u\", help=\"Login username. Only applies", "not forward special methods like __str__, since the interpreter bypasses", "script @staticmethod def _get_code(f): lines, _ = inspect.getsourcelines(f) # Remove", "set to \"root\", root the target as # early as", "\"-n\", help=\"The hostname/IP of the target.\") parser.add_argument(\"--username\", \"-u\", help=\"Login username.", "to abritrary code execution. \"\"\".format( script=os.path.basename(sys.argv[0]) ))) parser.add_argument(\"--conf\", '-c', help=\"Path", "no name clash risk # 2. we don't inline the", "created automatically virtio_args = [ f'--which-diod={diod_path}', '--workload-automation-vio={}', ] simulator_args.extend(virtio_args) #", "self.plat_info.add_src('target-conf', dict(name=name)) # Determine file transfer method. 
Currently avaliable options", "Note: lisa.target.Gem5SimulationPlatformWrapper should be used instead # of the original", "on Google Pixel it apparently # cannot be frozen, so", "\"\"\" Context manager that lets you disable all idle states", "'.join(sorted(modules))}\" else: modules = '' script = textwrap.dedent(''' import pickle", "de-freezing, otherwise we # will freeze to death and a", "'Lazily autodect the platform information to speed up the connection',", "tools additional to those specified in the test config 'tools'", ")) )) DEFAULT_SRC = { 'devlib': { 'platform': { 'class':", "board might be named \"foo-bar-juno-on-my-desk\") if name: self.plat_info.add_src('target-conf', dict(name=name)) #", "caller. :Variable keyword arguments: Forwarded to :meth:`execute` that will spawn", "does not exist ... else: raise def __dir__(self): \"\"\" List", "cls.from_conf(conf=target_conf, plat_info=platform_info, res_dir=args.res_dir) def _init_target(self, kind, name, workdir, device, host,", "we don't inline the whole world, which could lead to", "states for all domains') for cpu in range(self.plat_info['cpus-count']): cpuidle.enable_all(cpu) with", "transfer method: {devlib_file_xfer}') use_scp = devlib_file_xfer == 'scp' self._installed_tools =", "the folder already exists. In that case, # append_time should", "Target(...) 
@target.remote_func(timeout=42) def foo(x, y): return x + y #", "listed here not_target_conf_opt = { 'platform_info', 'log_level', 'res_dir', 'conf', }", "must be listed here not_target_conf_opt = { 'platform_info', 'log_level', 'res_dir',", "will be attempted, but failure will # not prevent from", "'android': if ('host' not in target_conf) and ('device' not in", "return wrapper_param class Gem5SimulationPlatformWrapper(Gem5SimulationPlatform): def __init__(self, system, simulator, **kwargs): simulator_args", "binary = os.path.join(ASSETS_PATH, 'binaries', 'scripts', tool) return binary tools =", "freeze_userspace(self): \"\"\" Context manager that lets you freeze the userspace.", "must be empty: {self._res_dir}') if plat_info is None: plat_info =", "conn_settings['adb_as_root'] = (username == 'root') elif kind == 'linux': devlib_target_cls", "help=\"Result directory of the created Target. If no directory is", "in custom_params } custom_args = argparse.Namespace(**custom_args) return custom_args, cls.from_conf(conf=target_conf, plat_info=platform_info,", "host=host, port=port, username=username, password=password, keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout,", "dict(name=name)) # Determine file transfer method. Currently avaliable options #", "governing permissions and # limitations under the License. # from", "module # is updated or so. We only inline local", ":param argv: The list of arguments. ``sys.argv[1:]`` will be used", "root --password <PASSWORD> In both cases, --conf can also contain", "it can lead to abritrary code execution. \"\"\".format( script=os.path.basename(sys.argv[0]) )))", "read the exception, raise the initial one # from devlib", "target configuration file with all the necessary connection information: $", "a :exc:`NameError` will be raised. .. 
attention:: Decorators are ignored", "gem5_bin=simulator['bin'], **kwargs ) # vim :set tabstop=4 shiftwidth=4 expandtab textwidth=80", "any python-specific YAML tags, so TargetConf files can # be", "name: myboard * file bar.yml:: target-conf: !include foo.yml This will", "for a in simulator_args) super().__init__( gem5_args=gem5_args, gem5_bin=simulator['bin'], **kwargs ) #", "available. :returns: ``True`` if module is available, ``False`` otherwise. :param", "to list of task names that we can't afford to", "under the hood, and avoids messing up with ``devlib`` internal", "= keyfile else: conn_settings['password'] = password elif kind == 'host':", "= devlib_target_cls( platform=devlib_platform, load_default_modules=False, connection_settings=conn_settings, working_directory=workdir, connect=False, ) target.connect(check_boot_completed=wait_boot, timeout=wait_boot_timeout)", "argparse import textwrap import functools import inspect import pickle import", "external callable we cannot control if hasattr(f, '__wrapped__'): raise TypeError('Decorated", "target to connect to.\") device_group = parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\", \"-d\", help=\"The", "self._devlib_loadable_modules: self.get_logger().info(f'Loading target devlib module {attr}') self.target.install_module(attr) return get() #", "scripts. Tests should not rely on that as the created", "target devlib module {attr}') self.target.install_module(attr) return get() # If it", "to add to the parser. It is in the form", "= devlib_file_xfer == 'scp' self._installed_tools = set() self.target = self._init_target(", "update it with command # line arguments try: conf =", "The top-level `target-conf` key is not needed here: .. code-block::", "it's more designed for quick scripting. 
\"\"\" parser = argparse.ArgumentParser(", "inspect import pickle import tempfile from types import ModuleType, FunctionType", "target module {attr} was explicitly excluded, not loading it') #", "from devlib.exception import TargetStableError from devlib.utils.misc import which from devlib.platform.gem5", "that requires a \"host_output_dir\" # argument computed at runtime. #", "conf = TargetConf.from_yaml_map(path) try: plat_info = PlatformInfo.from_yaml_map(path) except Exception as", "in vars(args).items() if v is not None and k not", "# It will fail if the folder already exists. In", "# try to freeze it. 'thermal-engine', # Similar issue with", "be used to provide a :class:`TargetConf` and :class:`lisa.platforms.platinfo.PlatformInfo` instances. ..", "test. EXAMPLES --conf can point to a YAML target configuration", "foo(x, y): return x + y # Execute the function", "../target_conf.yml :language: YAML :: TargetConf.from_yaml_map('target_conf.yml') The following special YAML tags", "sanity check to get better error messages if 'kind' not", "paths try: return read_output(val) # If the file is empty,", "If that is not the case, a :exc:`NameError` will be", "{} for param, settings in params.items(): parser.add_argument(f'--{param}', **settings) custom_params =", "if we # try to freeze it. 'thermal-engine', # Similar", "ValueError(f'Unsupported platform type {kind}') settings = '\\n '.join( f' {key}:", "are in # direct control f.__module__ == module ) def", "inlining of functions defined in the same module so that:", "Determine file transfer method. 
Currently avaliable options # are 'sftp'", "global_vars.items() if isinstance(mod, ModuleType) } def can_include(f): return ( isinstance(f,", "['host', 'username', 'password']: if required not in target_conf: parser.error(f'--{required} must", "If we were given an ArtifactPath with an existing root,", "target = cls.from_custom_cli(argv=argv, params=params) return target @classmethod def from_custom_cli(cls, argv=None,", "= textwrap.dedent(''.join(lines)) name = f.__name__ return (name, code_str) def execute_python(self,", "return value or any exception is pickled back and is", "with contextlib.suppress(FileNotFoundError): os.remove(res_lnk) # There may be a race condition", "os.path.join(d, 'out') self.pull(path, name) with open(name, 'rb') as f: return", "devlib_excluded_modules.add('cgroups') self._devlib_loadable_modules = _DEVLIB_AVAILABLE_MODULES - devlib_excluded_modules # Initialize binary tools", "from collections.abc import Mapping import copy import sys import argparse", "YAML configuration # to not use any python-specific YAML tags,", "results will be lost in some automated environment. :param name:", "not devlib_platform: devlib_platform = devlib.platform.Platform() ######################################################################## # Create devlib Target", "Forwarded to :meth:`execute` that will spawn the Python interpreter on", "params=None): \"\"\" Create a Target from command line arguments. :param", "password', [str, None]), KeyDesc('port', 'SSH or ADB server port', [int,", "manager that lets you freeze the userspace. .. 
note:: A", "nullcontext else: controllers = [s.name for s in self.cgroups.list_subsystems()] if", "Make a copy of the PlatformInfo so we don't modify", "kind of target to connect to.\") device_group = parser.add_mutually_exclusive_group() device_group.add_argument(\"--device\",", "lisa.utils import Loggable, HideExekallID, resolve_dotted_name, get_subclasses, import_all_submodules, LISA_HOME, RESULT_DIR, LATEST_LINK,", "if the relevant features aren't needed. .. note:: The wrapping", "getattr(self.target, attr) try: return get() except AttributeError: # Load the", "'timeout'], } def __init__(self, kind, name='<noname>', tools=[], res_dir=None, plat_info=None, lazy_platinfo=False,", "issue with HiKey960, the board will crash if this is", "class to use', [str]), KeyDesc('args', 'Keyword arguments to build the", "= set(modules.keys()) | set(funcs.keys()) global_vars = { name: val for", "from connecting to the target. if kind == 'android' and", "It's harmless as they would shadow any global name #", "f'mktemp -p {shlex.quote(self.working_directory)}' ).strip() def read_output(path): with tempfile.TemporaryDirectory() as d:", "as d: name = os.path.join(d, 'out') self.pull(path, name) with open(name,", "will spawn the Python interpreter on the target \"\"\" def", "name=f'{self.__class__.__qualname__}-{self.name}', append_time=True, symlink=True ) self._res_dir = res_dir os.makedirs(self._res_dir, exist_ok=True) if", "# has been excluded explicitly elif attr in _DEVLIB_AVAILABLE_MODULES: #", "# feed the function to us lines = [ line", "'Keyword arguments to build the Platform object', [Mapping]), )), KeyDesc('excluded-modules',", "logger = self.get_logger() while True: time_str = datetime.now().strftime('%Y%m%d_%H%M%S.%f') if not", "global_vars = { name: val for name, val in global_vars.items()", "lead to abritrary code execution. .. 
note:: That structure in", "devlib.LinuxTarget conn_settings.update( username=resolved_username, port=port or self.SSH_PORT_DEFAULT, host=host, strict_host_check=True if strict_host_check", "# Make a copy of the PlatformInfo so we don't", "install location and keep # that in _installed_tools, so we", "# 1. there is no name clash risk # 2.", "None as username means adb root will be attempted, but", ":param append_time: If True, the current datetime will be appended", "password=password, keyfile=keyfile, strict_host_check=strict_host_check, use_scp=use_scp, devlib_platform=devlib_platform, wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules =", "conn_settings = {} resolved_username = username or 'root' logger.debug(f'Setting up", "[])) simulator_args.extend(( f\"--kernel {system['kernel']}\", f\"--dtb {system['dtb']}\", f\"--disk-image {system['disk']}\" )) diod_path", "the given devlib module is available. :returns: ``True`` if module", "'\\n '.join( f' {key}: {val}' for key, val in conn_settings.items()", "they will only be # computed when actually needed. rta_calib_res_dir", "devlib_platform = devlib_platform_cls(**devlib_platform_kwargs) kwargs['devlib_platform'] = devlib_platform cls.check_init_param(**kwargs) return cls(**kwargs) @classmethod", "continue else: raise if symlink: res_lnk = os.path.join(LISA_HOME, LATEST_LINK) with", "settings. Only keys defined below are allowed, with the given", "under the License. 
# from datetime import datetime import os", "we were given an ArtifactPath with an existing root, we", "target configuration so it becomes # available for later inspection.", "f_name) code_str += '\\n' + '\\n'.join(map(itemgetter(1), funcs.values())) non_pickled = set(modules.keys())", "os.makedirs(rta_calib_res_dir) self.plat_info.add_target_src(self, rta_calib_res_dir, deferred=lazy_platinfo, fallback=True) logger.info(f'Effective platform information:\\n{self.plat_info}') @property @memoized", "the given Python function ``f`` with the provided positional and", "wait_boot=wait_boot, wait_boot_timeout=wait_boot_timeout, ) devlib_excluded_modules = set(devlib_excluded_modules) # Sorry, can't let", "class used under the hood, and avoids messing up with", "wrapper's globals so that it can # access them. It's", "of target_conf.yml: .. literalinclude:: ../target_conf.yml :language: YAML :: TargetConf.from_yaml_map('target_conf.yml') The", "TargetStableError: return False else: return True def is_module_available(self, module): \"\"\"", "forward special methods like __str__, since the interpreter bypasses __getattr__", "in global_vars.items() if name not in non_pickled } if modules:", "import contextlib import shlex from collections.abc import Mapping import copy" ]
[ "GetNodeInfoCommand(FilterCommand): \"\"\" Executes `getNodeInfo` command. See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command =", "] class GetNodeInfoCommand(FilterCommand): \"\"\" Executes `getNodeInfo` command. See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\"", "GetNodeInfoRequestFilter() def get_response_filter(self): return GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter): def __init__(self) ->", "that the request is empty. super(GetNodeInfoRequestFilter, self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter): def", "import filters as f from iota import TransactionHash, Address from", "enforce that the request is empty. super(GetNodeInfoRequestFilter, self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter):", "'getNodeInfo' def get_request_filter(self): return GetNodeInfoRequestFilter() def get_response_filter(self): return GetNodeInfoResponseFilter() class", "super(GetNodeInfoRequestFilter, self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter): def __init__(self) -> None: super(GetNodeInfoResponseFilter, self).__init__({", ":py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command = 'getNodeInfo' def get_request_filter(self): return GetNodeInfoRequestFilter() def", "None: # ``getNodeInfo`` does not accept any parameters. 
# Using", "return GetNodeInfoRequestFilter() def get_response_filter(self): return GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter): def __init__(self)", "def __init__(self) -> None: # ``getNodeInfo`` does not accept any", "| Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash), 'latestSolidSubtangleMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash),", "Using a filter here just to enforce that the request", "import TransactionHash, Address from iota.commands import FilterCommand, RequestFilter, ResponseFilter from", "= 'getNodeInfo' def get_request_filter(self): return GetNodeInfoRequestFilter() def get_response_filter(self): return GetNodeInfoResponseFilter()", "\"\"\" command = 'getNodeInfo' def get_request_filter(self): return GetNodeInfoRequestFilter() def get_response_filter(self):", "not accept any parameters. # Using a filter here just", "empty. super(GetNodeInfoRequestFilter, self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter): def __init__(self) -> None: super(GetNodeInfoResponseFilter,", "__all__ = [ 'GetNodeInfoCommand', ] class GetNodeInfoCommand(FilterCommand): \"\"\" Executes `getNodeInfo`", "is empty. 
super(GetNodeInfoRequestFilter, self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter): def __init__(self) -> None:", "iota.commands import FilterCommand, RequestFilter, ResponseFilter from iota.filters import Trytes __all__", "self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter): def __init__(self) -> None: super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress':", "GetNodeInfoRequestFilter(RequestFilter): def __init__(self) -> None: # ``getNodeInfo`` does not accept", "self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii') | Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash), 'latestSolidSubtangleMilestone':", "Address from iota.commands import FilterCommand, RequestFilter, ResponseFilter from iota.filters import", "GetNodeInfoResponseFilter(ResponseFilter): def __init__(self) -> None: super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii') |", "request is empty. super(GetNodeInfoRequestFilter, self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter): def __init__(self) ->", "import FilterCommand, RequestFilter, ResponseFilter from iota.filters import Trytes __all__ =", "to enforce that the request is empty. super(GetNodeInfoRequestFilter, self).__init__({}) class", "__init__(self) -> None: super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii') | Trytes(Address), 'latestMilestone':", "get_request_filter(self): return GetNodeInfoRequestFilter() def get_response_filter(self): return GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter): def", "accept any parameters. # Using a filter here just to", "from iota.filters import Trytes __all__ = [ 'GetNodeInfoCommand', ] class", "class GetNodeInfoRequestFilter(RequestFilter): def __init__(self) -> None: # ``getNodeInfo`` does not", "just to enforce that the request is empty. 
super(GetNodeInfoRequestFilter, self).__init__({})", "class GetNodeInfoResponseFilter(ResponseFilter): def __init__(self) -> None: super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii')", "a filter here just to enforce that the request is", "f from iota import TransactionHash, Address from iota.commands import FilterCommand,", "RequestFilter, ResponseFilter from iota.filters import Trytes __all__ = [ 'GetNodeInfoCommand',", "# Using a filter here just to enforce that the", "[ 'GetNodeInfoCommand', ] class GetNodeInfoCommand(FilterCommand): \"\"\" Executes `getNodeInfo` command. See", "def __init__(self) -> None: super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii') | Trytes(Address),", "iota import TransactionHash, Address from iota.commands import FilterCommand, RequestFilter, ResponseFilter", "TransactionHash, Address from iota.commands import FilterCommand, RequestFilter, ResponseFilter from iota.filters", "'GetNodeInfoCommand', ] class GetNodeInfoCommand(FilterCommand): \"\"\" Executes `getNodeInfo` command. See :py:meth:`iota.api.StrictIota.get_node_info`.", "command = 'getNodeInfo' def get_request_filter(self): return GetNodeInfoRequestFilter() def get_response_filter(self): return", "the request is empty. super(GetNodeInfoRequestFilter, self).__init__({}) class GetNodeInfoResponseFilter(ResponseFilter): def __init__(self)", "import Trytes __all__ = [ 'GetNodeInfoCommand', ] class GetNodeInfoCommand(FilterCommand): \"\"\"", "iota.filters import Trytes __all__ = [ 'GetNodeInfoCommand', ] class GetNodeInfoCommand(FilterCommand):", "does not accept any parameters. # Using a filter here", "get_response_filter(self): return GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter): def __init__(self) -> None: #", "here just to enforce that the request is empty. 
super(GetNodeInfoRequestFilter,", "filter here just to enforce that the request is empty.", "def get_request_filter(self): return GetNodeInfoRequestFilter() def get_response_filter(self): return GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter):", "def get_response_filter(self): return GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter): def __init__(self) -> None:", "# ``getNodeInfo`` does not accept any parameters. # Using a", "from iota import TransactionHash, Address from iota.commands import FilterCommand, RequestFilter,", "-> None: super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii') | Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii')", "GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter): def __init__(self) -> None: # ``getNodeInfo`` does", "None: super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii') | Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii') |", "``getNodeInfo`` does not accept any parameters. # Using a filter", "= [ 'GetNodeInfoCommand', ] class GetNodeInfoCommand(FilterCommand): \"\"\" Executes `getNodeInfo` command.", "Executes `getNodeInfo` command. See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command = 'getNodeInfo' def", "`getNodeInfo` command. See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command = 'getNodeInfo' def get_request_filter(self):", "ResponseFilter from iota.filters import Trytes __all__ = [ 'GetNodeInfoCommand', ]", "as f from iota import TransactionHash, Address from iota.commands import", "filters as f from iota import TransactionHash, Address from iota.commands", "f.ByteString(encoding='ascii') | Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash), 'latestSolidSubtangleMilestone': f.ByteString(encoding='ascii') |", "command. 
See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command = 'getNodeInfo' def get_request_filter(self): return", "parameters. # Using a filter here just to enforce that", "\"\"\" Executes `getNodeInfo` command. See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command = 'getNodeInfo'", "__init__(self) -> None: # ``getNodeInfo`` does not accept any parameters.", "See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command = 'getNodeInfo' def get_request_filter(self): return GetNodeInfoRequestFilter()", "-> None: # ``getNodeInfo`` does not accept any parameters. #", "'coordinatorAddress': f.ByteString(encoding='ascii') | Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash), 'latestSolidSubtangleMilestone': f.ByteString(encoding='ascii')", "super(GetNodeInfoResponseFilter, self).__init__({ 'coordinatorAddress': f.ByteString(encoding='ascii') | Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash),", "FilterCommand, RequestFilter, ResponseFilter from iota.filters import Trytes __all__ = [", "Trytes __all__ = [ 'GetNodeInfoCommand', ] class GetNodeInfoCommand(FilterCommand): \"\"\" Executes", "from iota.commands import FilterCommand, RequestFilter, ResponseFilter from iota.filters import Trytes", "return GetNodeInfoResponseFilter() class GetNodeInfoRequestFilter(RequestFilter): def __init__(self) -> None: # ``getNodeInfo``", "class GetNodeInfoCommand(FilterCommand): \"\"\" Executes `getNodeInfo` command. See :py:meth:`iota.api.StrictIota.get_node_info`. \"\"\" command", "Trytes(Address), 'latestMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash), 'latestSolidSubtangleMilestone': f.ByteString(encoding='ascii') | Trytes(TransactionHash), })", "any parameters. # Using a filter here just to enforce" ]
[ "request) if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrarRutinas(mysql, request) else:", "return redirect(url_for('Index')) def registrarEliminacion(mysql, request): if request.method=='POST' and controller.usuarioIniciado(): return", "ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarModiciaciones(mysql, request): if request.method", "request) else: flash(\"Alguno de los datos es incorrecto\") return redirect(url_for('actualizar_perfil',", "registerForm.validate(): return controller.registraUsuario(mysql, request, registerForm) return render_template('register.html', form=registerForm) def Index(mysql,", "form=actualize_form, datos=datos) return redirect(url_for('perfil')) def administracionRutinas(mysql, request): if controller.usuarioIniciado(): return", "request.method == 'POST' and controller.usuarioIniciado: if actualize_form.validate(): return controller.actualizar_perfil(mysql, request)", "return redirect(url_for('Index')) def registrarEjerciciosRutina(mysql, request): if request.method == 'POST': return", "request.method == 'POST': return controller.registrarModificaciones(mysql, request) return redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request):", "Index(mysql, request): if request.method=='GET': success= request.args.get('success') if success==None: if controller.usuarioIniciado():", "else: return redirect(url_for('Index')) def historial_usuario(mysql, request): if controller.usuarioIniciado() and request.method==", "and request.method== 'GET': return controller.mostrar_historial_usuario(mysql, request) else: return redirect(url_for('Index')) def", "register(mysql, request): registerForm= forms.RegisterForm(request.form) if request.method == 'POST' and registerForm.validate():", "m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8]) return render_template('perfil.html', success=success, usuario=usuario, imc=imc, 
evaluacion=functions.evaluarIMC(imc),", "request): if controller.usuarioIniciado(): return render_template('administracion_rutinas.html') else: return redirect(url_for('Index')) def crearRutina(mysql,", "if request.method=='POST' and controller.usuarioIniciado(): return controller.registrarEliminacion(mysql, request) else: return redirect(url_for('Index'))", "if request.method== 'POST': controller.iniciarSesion(mysql, request) if controller.usuarioIniciado() and request.method== 'GET':", "request.method == 'POST': return controller.registrarEjerciciosRutina(mysql, request) return redirect(url_for('adm_rutinas')) def modificarRutina(mysql,", "controller.registrarEjerciciosRutina(mysql, request) return redirect(url_for('adm_rutinas')) def modificarRutina(mysql, request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql)", "return controller.registrarModificaciones(mysql, request) return redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql)", "and request.method== 'GET': return controller.mostrarRutinas(mysql, request) else: return redirect(url_for('Index')) def", "controller.usuarioIniciado(): return redirect(url_for('home')) else: return render_template('Index.html') else: return render_template('Index.html', success=success)", "request) else: return redirect(url_for('Index')) def perfil(mysql, request): if controller.usuarioIniciado and", "pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal ) else: return redirect(url_for('Index'))", "request) else: if controller.rutinaIniciada() and controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql, request) if", "imc=functions.IMC(usuario[8], usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8]) return render_template('perfil.html', success=success, usuario=usuario,", "if controller.usuarioIniciado(): return 
render_template('administracion_rutinas.html') else: return redirect(url_for('Index')) def crearRutina(mysql, request):", "'POST': return controller.registrarEjerciciosRutina(mysql, request) return redirect(url_for('adm_rutinas')) def modificarRutina(mysql, request): if", "else: return redirect(url_for('Index')) def registrarEliminacion(mysql, request): if request.method=='POST' and controller.usuarioIniciado():", "usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal ) else: return redirect(url_for('Index')) def", "de los datos es incorrecto\") return redirect(url_for('actualizar_perfil', success=False)) else: if", "if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios) else:", "return controller.actualizar_perfil(mysql, request) else: flash(\"Alguno de los datos es incorrecto\")", "and controller.usuarioIniciado(): return controller.registrarEliminacion(mysql, request) else: return redirect(url_for('Index')) def registrarEjercicios(mysql,", "return render_template('perfil.html', success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7],", "request.method=='GET': success= request.args.get('success') if success==None: if controller.usuarioIniciado(): return redirect(url_for('home')) else:", "request.method== 'GET': return controller.mostrar_historial_usuario(mysql, request) else: return redirect(url_for('Index')) def perfil(mysql,", "return redirect(url_for('Index')) def historial_rutina(mysql, request): if controller.usuarioIniciado() and request.method== 'GET':", "actualize_form.validate(): return controller.actualizar_perfil(mysql, request) else: flash(\"Alguno de los datos es", "and controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql, request) if 
controller.usuarioIniciado(): return redirect(url_for('adm_rutinas')) else:", "'POST': return controller.registrarModificaciones(mysql, request) return redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request): if controller.usuarioIniciado():", "forms.PerfilForm(request.form) if request.method == 'POST' and controller.usuarioIniciado: if actualize_form.validate(): return", "request.method == 'POST' and registerForm.validate(): return controller.registraUsuario(mysql, request, registerForm) return", "return controller.registrarEjerciciosRutina(mysql, request) return redirect(url_for('adm_rutinas')) def modificarRutina(mysql, request): if controller.usuarioIniciado():", "request.method== 'GET': return controller.mostrarRutinas(mysql, request) else: return redirect(url_for('Index')) def historial_rutina(mysql,", "return render_template('Index.html') else: return render_template('Index.html', success=success) return render_template('Index.html') def home(mysql,", "request) else: return redirect(url_for('Index')) def historial_rutina(mysql, request): if controller.usuarioIniciado() and", "request) return redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return", "request.args.get('success') if success==None: if controller.usuarioIniciado(): return redirect(url_for('home')) else: return render_template('Index.html')", "request) imc=functions.IMC(usuario[8], usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8]) return render_template('perfil.html', success=success,", "controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_rutina(mysql, request) else: return redirect(url_for('Index'))", "usuario[8]) return render_template('perfil.html', success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), 
pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10],", "controller.rutinaEnCurso(mysql, request) if controller.usuarioIniciado(): return redirect(url_for('adm_rutinas')) else: return redirect(url_for('Index')) def", "if request.method == 'POST': return controller.registrarEjerciciosRutina(mysql, request) return redirect(url_for('adm_rutinas')) def", "and request.method== 'GET': return controller.mostrar_historial_rutina(mysql, request) else: return redirect(url_for('Index')) def", "import forms, functions from flask import Flask, render_template, request, redirect,", "if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_usuario(mysql, request) else: return", "redirect(url_for('Index')) def historial_usuario(mysql, request): if controller.usuarioIniciado() and request.method== 'GET': return", "controller.actualizar_perfil(mysql, request) else: flash(\"Alguno de los datos es incorrecto\") return", "controller.usuarioIniciado(): return redirect(url_for('adm_rutinas')) else: return redirect(url_for('Index')) def registrarEjerciciosRutina(mysql, request): if", "render_template('actualizar_perfil.html', form=actualize_form, datos=datos) return redirect(url_for('perfil')) def administracionRutinas(mysql, request): if controller.usuarioIniciado():", "controller.rutinaIniciada() and controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql, request) if controller.usuarioIniciado(): return redirect(url_for('adm_rutinas'))", "los datos es incorrecto\") return redirect(url_for('actualizar_perfil', success=False)) else: if request.method", "request) else: return redirect(url_for('Index')) def historial_usuario(mysql, request): if controller.usuarioIniciado() and", "redirect(url_for('Index')) def registrarModiciaciones(mysql, request): if request.method == 'POST': return controller.registrarModificaciones(mysql,", "== 'POST': return controller.registrarModificaciones(mysql, request) return 
redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request): if", "registrarEjercicios(mysql, request): if request.method == 'POST': return controller.registrarEjercicio(mysql, request) return", "usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8]) return render_template('perfil.html', success=success, usuario=usuario, imc=imc,", "render_template('Index.html') def home(mysql, request): if request.method== 'POST': controller.iniciarSesion(mysql, request) if", "return redirect(url_for('adm_rutinas')) def modificarRutina(mysql, request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql)", "render_template('register.html', form=registerForm) def Index(mysql, request): if request.method=='GET': success= request.args.get('success') if", ") else: return redirect(url_for('Index')) def ActualizarPerfil(mysql, request): actualize_form= forms.PerfilForm(request.form) if", "imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal ) else:", "registerForm) return render_template('register.html', form=registerForm) def Index(mysql, request): if request.method=='GET': success=", "request): if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_rutina(mysql, request) else:", "request) if controller.usuarioIniciado(): return redirect(url_for('adm_rutinas')) else: return redirect(url_for('Index')) def registrarEjerciciosRutina(mysql,", "datos=datos) return redirect(url_for('perfil')) def administracionRutinas(mysql, request): if controller.usuarioIniciado(): return render_template('administracion_rutinas.html')", "controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html', form=actualize_form, datos=datos) return 
redirect(url_for('perfil')) def administracionRutinas(mysql,", "if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_rutina(mysql, request) else: return", "return render_template('Index.html') def home(mysql, request): if request.method== 'POST': controller.iniciarSesion(mysql, request)", "controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios)", "else: return redirect(url_for('Index')) def registrarModiciaciones(mysql, request): if request.method == 'POST':", "def ActualizarPerfil(mysql, request): actualize_form= forms.PerfilForm(request.form) if request.method == 'POST' and", "datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index'))", "request): if request.method == 'POST': return controller.registrarModificaciones(mysql, request) return redirect(url_for('adm_rutinas'))", "import Flask, render_template, request, redirect, url_for, flash def register(mysql, request):", "controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios) else: return", "controller.usuarioIniciado() and request.method== 'GET': return controller.mostrarRutinas(mysql, request) else: return redirect(url_for('Index'))", "datos es incorrecto\") return redirect(url_for('actualizar_perfil', success=False)) else: if request.method ==", "functions from flask import Flask, render_template, request, redirect, url_for, flash", "return render_template('register.html', form=registerForm) def Index(mysql, request): if request.method=='GET': success= 
request.args.get('success')", "render_template('Index.html') else: return render_template('Index.html', success=success) return render_template('Index.html') def home(mysql, request):", "rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarModiciaciones(mysql, request):", "redirect(url_for('home')) else: return render_template('Index.html') else: return render_template('Index.html', success=success) return render_template('Index.html')", "'GET': return controller.mostrarRutinas(mysql, request) else: return redirect(url_for('Index')) def historial_rutina(mysql, request):", "controller.agregarRutina(mysql, request) else: if controller.rutinaIniciada() and controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql, request)", "rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index'))", "Flask, render_template, request, redirect, url_for, flash def register(mysql, request): registerForm=", "if request.method =='POST' and controller.usuarioIniciado(): return controller.agregarRutina(mysql, request) else: if", "'POST': controller.iniciarSesion(mysql, request) if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrarRutinas(mysql,", "success=success) return render_template('Index.html') def home(mysql, request): if request.method== 'POST': controller.iniciarSesion(mysql,", "redirect(url_for('actualizar_perfil', success=False)) else: if request.method == 'GET' and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql)", "return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarEliminacion(mysql,", "def modificarRutina(mysql, request): if controller.usuarioIniciado(): 
rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html',", "controller.mostrar_historial_rutina(mysql, request) else: return redirect(url_for('Index')) def historial_usuario(mysql, request): if controller.usuarioIniciado()", "request): if request.method =='POST' and controller.usuarioIniciado(): return controller.agregarRutina(mysql, request) else:", "forms.RegisterForm(request.form) if request.method == 'POST' and registerForm.validate(): return controller.registraUsuario(mysql, request,", "if controller.usuarioIniciado(): return redirect(url_for('home')) else: return render_template('Index.html') else: return render_template('Index.html',", "if actualize_form.validate(): return controller.actualizar_perfil(mysql, request) else: flash(\"Alguno de los datos", "if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer,", "usuario[11]), m_basal=m_basal ) else: return redirect(url_for('Index')) def ActualizarPerfil(mysql, request): actualize_form=", "if request.method == 'POST' and controller.usuarioIniciado: if actualize_form.validate(): return controller.actualizar_perfil(mysql,", "def eliminarRutina(mysql,request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html', rutinas=rutinas ,", "controller.usuarioIniciado(): return controller.agregarRutina(mysql, request) else: if controller.rutinaIniciada() and controller.usuarioIniciado(): return", "home(mysql, request): if request.method== 'POST': controller.iniciarSesion(mysql, request) if controller.usuarioIniciado() and", "else: return redirect(url_for('Index')) def perfil(mysql, 
request): if controller.usuarioIniciado and request.method=='GET':", "request): if request.method== 'POST': controller.iniciarSesion(mysql, request) if controller.usuarioIniciado() and request.method==", "import controller import forms, functions from flask import Flask, render_template,", "evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal ) else: return", "request.method == 'GET' and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html', form=actualize_form, datos=datos)", "return redirect(url_for('Index')) def historial_usuario(mysql, request): if controller.usuarioIniciado() and request.method== 'GET':", "flash(\"Alguno de los datos es incorrecto\") return redirect(url_for('actualizar_perfil', success=False)) else:", "administracionRutinas(mysql, request): if controller.usuarioIniciado(): return render_template('administracion_rutinas.html') else: return redirect(url_for('Index')) def", "if controller.rutinaIniciada() and controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql, request) if controller.usuarioIniciado(): return", "else: flash(\"Alguno de los datos es incorrecto\") return redirect(url_for('actualizar_perfil', success=False))", "controller.registrarModificaciones(mysql, request) return redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql)", "else: return redirect(url_for('Index')) def crearRutina(mysql, request): if request.method =='POST' and", "=='POST' and controller.usuarioIniciado(): return controller.agregarRutina(mysql, request) else: if controller.rutinaIniciada() and", "return redirect(url_for('adm_rutinas')) else: return redirect(url_for('Index')) def registrarEjerciciosRutina(mysql, request): if request.method", "request): if 
request.method == 'POST': return controller.registrarEjerciciosRutina(mysql, request) return redirect(url_for('adm_rutinas'))", "usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal )", "def administracionRutinas(mysql, request): if controller.usuarioIniciado(): return render_template('administracion_rutinas.html') else: return redirect(url_for('Index'))", "if request.method == 'POST': return controller.registrarModificaciones(mysql, request) return redirect(url_for('adm_rutinas')) def", "if request.method == 'POST' and registerForm.validate(): return controller.registraUsuario(mysql, request, registerForm)", "form=registerForm) def Index(mysql, request): if request.method=='GET': success= request.args.get('success') if success==None:", "controller.iniciarSesion(mysql, request) if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrarRutinas(mysql, request)", "controller.usuarioIniciado and request.method=='GET': success= request.args.get('success') usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8], usuario[7]) m_basal=", "render_template, request, redirect, url_for, flash def register(mysql, request): registerForm= forms.RegisterForm(request.form)", "return redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html',", "success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal", "request.method=='POST' and controller.usuarioIniciado(): return controller.registrarEliminacion(mysql, request) else: return redirect(url_for('Index')) def", "def registrarEjerciciosRutina(mysql, request): if 
request.method == 'POST': return controller.registrarEjerciciosRutina(mysql, request)", "if request.method=='GET': success= request.args.get('success') if success==None: if controller.usuarioIniciado(): return redirect(url_for('home'))", "if success==None: if controller.usuarioIniciado(): return redirect(url_for('home')) else: return render_template('Index.html') else:", "redirect(url_for('Index')) def perfil(mysql, request): if controller.usuarioIniciado and request.method=='GET': success= request.args.get('success')", "if controller.usuarioIniciado(): return redirect(url_for('adm_rutinas')) else: return redirect(url_for('Index')) def registrarEjerciciosRutina(mysql, request):", "success= request.args.get('success') usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8], usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8])", "historial_rutina(mysql, request): if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_rutina(mysql, request)", "render_template('administracion_rutinas.html') else: return redirect(url_for('Index')) def crearRutina(mysql, request): if request.method =='POST'", "def Index(mysql, request): if request.method=='GET': success= request.args.get('success') if success==None: if", "def home(mysql, request): if request.method== 'POST': controller.iniciarSesion(mysql, request) if controller.usuarioIniciado()", "redirect(url_for('Index')) def historial_rutina(mysql, request): if controller.usuarioIniciado() and request.method== 'GET': return", "== 'POST' and registerForm.validate(): return controller.registraUsuario(mysql, request, registerForm) return render_template('register.html',", "crearRutina(mysql, request): if request.method =='POST' and controller.usuarioIniciado(): return controller.agregarRutina(mysql, request)", "usuario[10], usuario[7], usuario[11]), m_basal=m_basal ) else: return redirect(url_for('Index')) def ActualizarPerfil(mysql,", 
"registerForm= forms.RegisterForm(request.form) if request.method == 'POST' and registerForm.validate(): return controller.registraUsuario(mysql,", ", rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarEliminacion(mysql, request): if request.method=='POST'", "request): if controller.usuarioIniciado and request.method=='GET': success= request.args.get('success') usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8],", "def historial_usuario(mysql, request): if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_usuario(mysql,", "registrarEjerciciosRutina(mysql, request): if request.method == 'POST': return controller.registrarEjerciciosRutina(mysql, request) return", "flash def register(mysql, request): registerForm= forms.RegisterForm(request.form) if request.method == 'POST'", "def registrarEliminacion(mysql, request): if request.method=='POST' and controller.usuarioIniciado(): return controller.registrarEliminacion(mysql, request)", "def registrarEjercicios(mysql, request): if request.method == 'POST': return controller.registrarEjercicio(mysql, request)", "request): if request.method=='POST' and controller.usuarioIniciado(): return controller.registrarEliminacion(mysql, request) else: return", "else: if request.method == 'GET' and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html',", "return controller.registrarEliminacion(mysql, request) else: return redirect(url_for('Index')) def registrarEjercicios(mysql, request): if", "success==None: if controller.usuarioIniciado(): return redirect(url_for('home')) else: return render_template('Index.html') else: return", "request, redirect, url_for, flash def register(mysql, request): registerForm= forms.RegisterForm(request.form) if", "else: return render_template('Index.html') else: return render_template('Index.html', success=success) return 
render_template('Index.html') def", "controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8]) return render_template('perfil.html', success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5],", "usuario[7], usuario[11]), m_basal=m_basal ) else: return redirect(url_for('Index')) def ActualizarPerfil(mysql, request):", "def perfil(mysql, request): if controller.usuarioIniciado and request.method=='GET': success= request.args.get('success') usuario=controller.datosUsuario(mysql,", "es incorrecto\") return redirect(url_for('actualizar_perfil', success=False)) else: if request.method == 'GET'", "datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html', form=actualize_form, datos=datos) return redirect(url_for('perfil')) def administracionRutinas(mysql, request):", "rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def", "if controller.usuarioIniciado and request.method=='GET': success= request.args.get('success') usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8], usuario[7])", "else: return redirect(url_for('Index')) def ActualizarPerfil(mysql, request): actualize_form= forms.PerfilForm(request.form) if request.method", "return controller.mostrarRutinas(mysql, request) else: return redirect(url_for('Index')) def historial_rutina(mysql, request): if", "return redirect(url_for('actualizar_perfil', success=False)) else: if request.method == 'GET' and controller.usuarioIniciado:", "else: return redirect(url_for('Index')) def registrarEjerciciosRutina(mysql, request): if request.method == 'POST':", "redirect(url_for('adm_rutinas')) else: return redirect(url_for('Index')) def registrarEjerciciosRutina(mysql, request): if request.method ==", "redirect(url_for('adm_rutinas')) def eliminarRutina(mysql,request): if 
controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html', rutinas=rutinas", "controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_usuario(mysql, request) else: return redirect(url_for('Index'))", "usuario[7], usuario[8]) return render_template('perfil.html', success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9],", "and controller.usuarioIniciado: if actualize_form.validate(): return controller.actualizar_perfil(mysql, request) else: flash(\"Alguno de", "request): if request.method == 'POST': return controller.registrarEjercicio(mysql, request) return redirect(url_for('ejercicios'))", "controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql, request) if controller.usuarioIniciado(): return redirect(url_for('adm_rutinas')) else: return", "render_template('Index.html', success=success) return render_template('Index.html') def home(mysql, request): if request.method== 'POST':", "return redirect(url_for('home')) else: return render_template('Index.html') else: return render_template('Index.html', success=success) return", "return render_template('administracion_rutinas.html') else: return redirect(url_for('Index')) def crearRutina(mysql, request): if request.method", "redirect(url_for('Index')) def registrarEjerciciosRutina(mysql, request): if request.method == 'POST': return controller.registrarEjerciciosRutina(mysql,", "forms, functions from flask import Flask, render_template, request, redirect, url_for,", "request.method== 'POST': controller.iniciarSesion(mysql, request) if controller.usuarioIniciado() and request.method== 'GET': return", "controller.usuarioIniciado(): return render_template('administracion_rutinas.html') else: return redirect(url_for('Index')) def crearRutina(mysql, request): if", "registrarModiciaciones(mysql, 
request): if request.method == 'POST': return controller.registrarModificaciones(mysql, request) return", "def crearRutina(mysql, request): if request.method =='POST' and controller.usuarioIniciado(): return controller.agregarRutina(mysql,", "modificarRutina(mysql, request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html', rutinas=rutinas", "controller.mostrarRutinas(mysql, request) else: return redirect(url_for('Index')) def historial_rutina(mysql, request): if controller.usuarioIniciado()", "else: if controller.rutinaIniciada() and controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql, request) if controller.usuarioIniciado():", "controller.usuarioIniciado(): return controller.registrarEliminacion(mysql, request) else: return redirect(url_for('Index')) def registrarEjercicios(mysql, request):", "redirect(url_for('perfil')) def administracionRutinas(mysql, request): if controller.usuarioIniciado(): return render_template('administracion_rutinas.html') else: return", "return controller.rutinaEnCurso(mysql, request) if controller.usuarioIniciado(): return redirect(url_for('adm_rutinas')) else: return redirect(url_for('Index'))", "return controller.registraUsuario(mysql, request, registerForm) return render_template('register.html', form=registerForm) def Index(mysql, request):", "return render_template('Index.html', success=success) return render_template('Index.html') def home(mysql, request): if request.method==", "rutinas=rutinas , rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarEliminacion(mysql, request): if", "else: return redirect(url_for('Index')) def historial_rutina(mysql, request): if controller.usuarioIniciado() and request.method==", "rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarModiciaciones(mysql, 
request): if request.method ==", "return redirect(url_for('Index')) def perfil(mysql, request): if controller.usuarioIniciado and request.method=='GET': success=", "request): actualize_form= forms.PerfilForm(request.form) if request.method == 'POST' and controller.usuarioIniciado: if", "request.method =='POST' and controller.usuarioIniciado(): return controller.agregarRutina(mysql, request) else: if controller.rutinaIniciada()", "redirect, url_for, flash def register(mysql, request): registerForm= forms.RegisterForm(request.form) if request.method", "return redirect(url_for('Index')) def registrarEjercicios(mysql, request): if request.method == 'POST': return", "and registerForm.validate(): return controller.registraUsuario(mysql, request, registerForm) return render_template('register.html', form=registerForm) def", "controller.mostrar_historial_usuario(mysql, request) else: return redirect(url_for('Index')) def perfil(mysql, request): if controller.usuarioIniciado", "return render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def", "flask import Flask, render_template, request, redirect, url_for, flash def register(mysql,", "if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrarRutinas(mysql, request) else: return", "== 'POST': return controller.registrarEjerciciosRutina(mysql, request) return redirect(url_for('adm_rutinas')) def modificarRutina(mysql, request):", "perfil(mysql, request): if controller.usuarioIniciado and request.method=='GET': success= request.args.get('success') usuario=controller.datosUsuario(mysql, request)", "request, registerForm) return render_template('register.html', form=registerForm) def Index(mysql, request): if request.method=='GET':", "rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html', rutinas=rutinas , 
ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else: return", "request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html', rutinas=rutinas ,", "Negocio import controller import forms, functions from flask import Flask,", "redirect(url_for('Index')) def registrarEjercicios(mysql, request): if request.method == 'POST': return controller.registrarEjercicio(mysql,", "success=False)) else: if request.method == 'GET' and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return", "from Negocio import controller import forms, functions from flask import", "'POST' and registerForm.validate(): return controller.registraUsuario(mysql, request, registerForm) return render_template('register.html', form=registerForm)", "rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarEliminacion(mysql, request): if request.method=='POST' and", "controller.registrarEliminacion(mysql, request) else: return redirect(url_for('Index')) def registrarEjercicios(mysql, request): if request.method", "url_for, flash def register(mysql, request): registerForm= forms.RegisterForm(request.form) if request.method ==", "eliminarRutina(mysql,request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios)", "historial_usuario(mysql, request): if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_usuario(mysql, request)", "actualize_form= forms.PerfilForm(request.form) if request.method == 'POST' and controller.usuarioIniciado: if actualize_form.validate():", "controller import forms, functions from flask import Flask, render_template, request,", "'GET': return 
controller.mostrar_historial_rutina(mysql, request) else: return redirect(url_for('Index')) def historial_usuario(mysql, request):", "render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarEliminacion(mysql, request):", "'GET' and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html', form=actualize_form, datos=datos) return redirect(url_for('perfil'))", "return render_template('actualizar_perfil.html', form=actualize_form, datos=datos) return redirect(url_for('perfil')) def administracionRutinas(mysql, request): if", "return redirect(url_for('Index')) def registrarModiciaciones(mysql, request): if request.method == 'POST': return", "and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html', form=actualize_form, datos=datos) return redirect(url_for('perfil')) def", "return controller.mostrar_historial_rutina(mysql, request) else: return redirect(url_for('Index')) def historial_usuario(mysql, request): if", "ActualizarPerfil(mysql, request): actualize_form= forms.PerfilForm(request.form) if request.method == 'POST' and controller.usuarioIniciado:", "return redirect(url_for('Index')) def crearRutina(mysql, request): if request.method =='POST' and controller.usuarioIniciado():", "request) return redirect(url_for('adm_rutinas')) def modificarRutina(mysql, request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql)", "request.method=='GET': success= request.args.get('success') usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8], usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7],", "== 'GET' and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html', form=actualize_form, 
datos=datos) return", "'GET': return controller.mostrar_historial_usuario(mysql, request) else: return redirect(url_for('Index')) def perfil(mysql, request):", "else: return render_template('Index.html', success=success) return render_template('Index.html') def home(mysql, request): if", "== 'POST' and controller.usuarioIniciado: if actualize_form.validate(): return controller.actualizar_perfil(mysql, request) else:", "render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarModiciaciones(mysql,", "controller.usuarioIniciado: if actualize_form.validate(): return controller.actualizar_perfil(mysql, request) else: flash(\"Alguno de los", "from flask import Flask, render_template, request, redirect, url_for, flash def", "return redirect(url_for('Index')) def ActualizarPerfil(mysql, request): actualize_form= forms.PerfilForm(request.form) if request.method ==", "request.args.get('success') usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8], usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8]) return", "redirect(url_for('Index')) def crearRutina(mysql, request): if request.method =='POST' and controller.usuarioIniciado(): return", "rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else:", ", ejercicios=datosEjer, rutinaEjer=rutinaEjercicios) else: return redirect(url_for('Index')) def registrarModiciaciones(mysql, request): if", "request): if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_usuario(mysql, request) else:", "request): registerForm= forms.RegisterForm(request.form) if request.method == 'POST' and registerForm.validate(): return", "and 
controller.usuarioIniciado(): return controller.agregarRutina(mysql, request) else: if controller.rutinaIniciada() and controller.usuarioIniciado():", "return controller.mostrar_historial_usuario(mysql, request) else: return redirect(url_for('Index')) def perfil(mysql, request): if", "request) else: return redirect(url_for('Index')) def registrarEjercicios(mysql, request): if request.method ==", "def historial_rutina(mysql, request): if controller.usuarioIniciado() and request.method== 'GET': return controller.mostrar_historial_rutina(mysql,", "registrarEliminacion(mysql, request): if request.method=='POST' and controller.usuarioIniciado(): return controller.registrarEliminacion(mysql, request) else:", "else: return redirect(url_for('Index')) def registrarEjercicios(mysql, request): if request.method == 'POST':", "render_template('perfil.html', success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]),", "and request.method=='GET': success= request.args.get('success') usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8], usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql,", "controller.registraUsuario(mysql, request, registerForm) return render_template('register.html', form=registerForm) def Index(mysql, request): if", "redirect(url_for('adm_rutinas')) def modificarRutina(mysql, request): if controller.usuarioIniciado(): rutinas=controller.rutinasUsuario(mysql) rutinaEjercicios=controller.rutinaEjercicios(mysql) datosEjer=controller.datosEjercicios(mysql) return", "def register(mysql, request): registerForm= forms.RegisterForm(request.form) if request.method == 'POST' and", "success= request.args.get('success') if success==None: if controller.usuarioIniciado(): return redirect(url_for('home')) else: return", "'POST' and controller.usuarioIniciado: if actualize_form.validate(): return 
controller.actualizar_perfil(mysql, request) else: flash(\"Alguno", "usuario=controller.datosUsuario(mysql, request) imc=functions.IMC(usuario[8], usuario[7]) m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8]) return render_template('perfil.html',", "incorrecto\") return redirect(url_for('actualizar_perfil', success=False)) else: if request.method == 'GET' and", "redirect(url_for('Index')) def registrarEliminacion(mysql, request): if request.method=='POST' and controller.usuarioIniciado(): return controller.registrarEliminacion(mysql,", "request.method== 'GET': return controller.mostrar_historial_rutina(mysql, request) else: return redirect(url_for('Index')) def historial_usuario(mysql,", "def registrarModiciaciones(mysql, request): if request.method == 'POST': return controller.registrarModificaciones(mysql, request)", "request): if request.method=='GET': success= request.args.get('success') if success==None: if controller.usuarioIniciado(): return", "m_basal=m_basal ) else: return redirect(url_for('Index')) def ActualizarPerfil(mysql, request): actualize_form= forms.PerfilForm(request.form)", "if request.method == 'GET' and controller.usuarioIniciado: datos=controller.formulario_perfil(mysql) return render_template('actualizar_perfil.html', form=actualize_form,", "redirect(url_for('Index')) def ActualizarPerfil(mysql, request): actualize_form= forms.PerfilForm(request.form) if request.method == 'POST'", "return redirect(url_for('perfil')) def administracionRutinas(mysql, request): if controller.usuarioIniciado(): return render_template('administracion_rutinas.html') else:", "return controller.agregarRutina(mysql, request) else: if controller.rutinaIniciada() and controller.usuarioIniciado(): return controller.rutinaEnCurso(mysql," ]
[ "assert m._new_format is False assert m._result_name == 'completions' def test_new_format_load():", "= Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format is False assert m._result_name ==", "0 def test_old_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format is", "def test_new_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert m._new_format is True", "= m.get_annotations_agreement() assert matrix is not None assert matrix >", "matrix = m.get_annotations_agreement() assert matrix is not None assert matrix", "test_old_format_agreement_matrix(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") matrix = m.get_annotations_agreement() assert matrix", "== 'completions' def test_new_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert m._new_format", "m.load(r\"./tests/test_data/test_old_format.json\") matrix = m.get_annotations_agreement() assert matrix is not None assert", "test_new_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert m._new_format is True assert", "import Matcher def test_old_format_agreement_matrix(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") matrix =", "Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert m._new_format is True assert m._result_name == 'annotations'", "m._new_format is False assert m._result_name == 'completions' def test_new_format_load(): m", "matrix > 0 def test_old_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert", "matrix is not None assert matrix > 0 def test_old_format_load():", "evalme.matcher import Matcher def test_old_format_agreement_matrix(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") 
matrix", "m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") matrix = m.get_annotations_agreement() assert matrix is", "not None assert matrix > 0 def test_old_format_load(): m =", "is not None assert matrix > 0 def test_old_format_load(): m", "<filename>evalme/tests/test_old_format.py from evalme.matcher import Matcher def test_old_format_agreement_matrix(): m = Matcher(new_format=False)", "> 0 def test_old_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format", "'completions' def test_new_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert m._new_format is", "def test_old_format_agreement_matrix(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") matrix = m.get_annotations_agreement() assert", "test_old_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format is False assert", "False assert m._result_name == 'completions' def test_new_format_load(): m = Matcher(new_format=False)", "m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format is False assert m._result_name == 'completions' def", "from evalme.matcher import Matcher def test_old_format_agreement_matrix(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\")", "assert matrix > 0 def test_old_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\")", "assert matrix is not None assert matrix > 0 def", "m._result_name == 'completions' def test_new_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert", "m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert m._new_format is True assert m._result_name", "is False assert m._result_name == 'completions' def test_new_format_load(): m =", "assert m._result_name == 'completions' 
def test_new_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\")", "None assert matrix > 0 def test_old_format_load(): m = Matcher(new_format=False)", "= Matcher(new_format=False) m.load(r\"./tests/test_data/test_bbox.json\") assert m._new_format is True assert m._result_name ==", "m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format is False assert m._result_name", "m.get_annotations_agreement() assert matrix is not None assert matrix > 0", "= Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") matrix = m.get_annotations_agreement() assert matrix is not", "def test_old_format_load(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format is False", "Matcher def test_old_format_agreement_matrix(): m = Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") matrix = m.get_annotations_agreement()", "Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") matrix = m.get_annotations_agreement() assert matrix is not None", "Matcher(new_format=False) m.load(r\"./tests/test_data/test_old_format.json\") assert m._new_format is False assert m._result_name == 'completions'" ]
[ "internal_cleanup_func(): cleanup_func(*args, **kwargs) else: internal_cleanup_func = cleanup_func current_frame = self._stack[0]", "Without dropping the last context layer. except Exception: cleanups_failed =", "= os.path.abspath(base_dir) # supplied path might be to a feature", "if os.path.isfile(os.path.join(new_base_dir, environment_file)): break if new_base_dir == root_dir: break new_base_dir", "self._mode = ContextMode.BEHAVE # -- MODEL ENTITY REFERENCES/SUPPORT: self.feature =", "executed successfully. :raises: AssertionError, if a step failure occurs. :raises:", "callable(cleanup_func)\" assert self._stack if args or kwargs: def internal_cleanup_func(): cleanup_func(*args,", "current_mode @contextlib.contextmanager def scoped_context_layer(context, layer_name=None): \"\"\"Provides context manager for context", "new_base_dir) if os.path.isdir(os.path.join(new_base_dir, steps_dir)): break if os.path.isfile(os.path.join(new_base_dir, environment_file)): break if", "of the environment before_all and after_all). .. attribute:: scenario This", "using their file location). feature_locations = [filename for filename in", "\"%s\" directory in your '\\ 'specified path \"%s\"' % (steps_dir,", "not find \"%s\" directory. '\\ 'Please specify where to find", "already stored. # Append only error message. statement.error_message += u\"\\n\"+", "error or failure assertion) then the step invoking it will", "= [] # -- Allow steps to import other stuff", "+ repr(frame)) def __getattr__(self, attr): if attr[0] == \"_\": try:", "configuration of *behave* as determined by configuration files and command-line", "steps, hooks, fixtures, ...) 
\"\"\" BEHAVE = 1 USER =", "value = False if self.context: value = self.context.aborted return value", "self.setup_paths() return self.run_with_paths() def run_with_paths(self): self.context = Context(self) self.load_hooks() self.load_step_definitions()", "context.on_cleanup_error = handle_cleanup_error context.add_cleanup(cleanup_database, the_database) \"\"\" # -- BEST-EFFORT ALGORITHM:", "overwrite a user-set variable. You may use the \"in\" operator", "its directory\") base_dir = os.path.dirname(base_dir) else: if self.config.verbose: print('Using default", "= None self.stderr_capture = None self.log_capture = None self.fail_on_cleanup_errors =", "\"'{0}' object has no attribute '{1}' at the current level\"", "\"filename\": record[0], \"line\": record[1], \"function\": record[3], } self._emit_warning(attr, params) stack_limit", "set at the step level and holds any :class:`~behave.model.Table` associated", "and self._origin[attr] is not ContextMode.BEHAVE: msg = \"behave runner is", "self._use_with_behave_mode(): for step in steps: passed = step.run(self._runner, quiet=True, capture=False)", "stacklevel=2) return self.use_with_user_mode() def _set_root_attribute(self, attr, value): for frame in", "the variable was originally set by user code then this", "== \"_\": self.__dict__[attr] = value return for frame in self._stack[1:]:", "doc=\"Indicates that test run is aborted by the user.\") def", "msg = msg % params warnings.warn(msg, ContextMaskWarning, stacklevel=3) def _dump(self,", "collect_feature_locations, parse_features, \\ exec_file, load_step_modules, PathManager from behave.step_registry import registry", "return for frame in self._stack[1:]: if attr in frame: record", "from the feature and scenario. This attribute will not be", "Mode to apply to context object. \"\"\" # pylint: disable=protected-access", "related to the tests you're running. You may add arbitrary", "context stack. HINT: Use layer_name values: \"scenario\", \"feature\", \"testrun\". 
:param", "failed_count = 0 undefined_steps_initial_size = len(self.undefined_steps) for feature in features:", "model (elements) \"\"\" def __init__(self, config): super(Runner, self).__init__(config) self.path_manager =", "= runner.config d = self._root = { \"aborted\": False, \"failed\":", "from path in a platform independent way. POSIX-PATH EXAMPLE: rootdir", "(step.status.name.upper(), step_line) if step.error_message: message += \"\\nSubstep info: %s\\n\" %", "the context in BEHAVE mode.\"\"\" return use_context_with_mode(self, ContextMode.BEHAVE) def use_with_user_mode(self):", "= self._stack[0] cleanup_funcs = current_layer.get(\"@cleanups\", []) on_cleanup_error = getattr(self, \"on_cleanup_error\",", "user code overwrites the value. \"\"\" pass class ContextMode(Enum): \"\"\"Used", "e) if self.fail_on_cleanup_errors and cleanup_errors: first_cleanup_erro_info = cleanup_errors[0] del cleanup_errors", "with the step. .. attribute:: text This is set at", "try: context._push(layer_name) yield context finally: context._pop() def path_getrootdir(path): \"\"\" Extract", "-- CASE: feature, scenario, step statement = args[0] if statement:", "args: Args for cleanup_func() call (optional). :param kwargs: Kwargs for", "by user code then this will be raised if *behave*", "dirnames, filenames in os.walk(base_dir, followlinks=True): if [fn for fn in", "-*- coding: UTF-8 -*- \"\"\" This module provides Runner class", "self.config.paths)) first_path = self.config.paths[0] if hasattr(first_path, \"filename\"): # -- BETTER:", "in self.__dict__ for frame in self._stack: if attr in frame:", "ContextMode.BEHAVE or ContextMode.USER mode. 
Provides a context manager for switching", "KeyboardInterrupt: self.aborted = True failed_count += 1 run_feature = False", "= Context(self) return self.run_model() class Runner(ModelRunner): \"\"\" Standard test runner", "CASE: feature, scenario, step statement.hook_failed = True if statement.error_message: #", "attribute:: stderr_capture If stderr capture is enabled then this attribute", "before_all and after_all). .. attribute:: scenario This is set when", "holds any multiline text associated with the step. .. attribute::", "\"): for level, frame in enumerate(self._stack): print(\"%sLevel %d\" % (prefix,", "\", \".join('\"%s\"' % path for path in self.config.paths)) first_path =", "# -- RESTORE: Initial current_mode # Even if an AssertionError/Exception", "[], # -- REQUIRED-BY: before_all() hook \"@layer\": \"testrun\", } self._stack", "cleanup_func current_frame = self._stack[0] if cleanup_func not in current_frame[\"@cleanups\"]: #", "\"tag\" in name: extra = \"(tag=%s)\" % args[0] error_text =", ":raises: AssertionError, if a step failure occurs. :raises: ValueError, if", "EXAMPLE: rootdir = path_getrootdir(\"/foo/bar/one.feature\") assert rootdir == \"/\" WINDOWS-PATH EXAMPLE:", ":class:`~behave.model.Feature`. It will not be present outside of a feature", "disable=broad-except # -- HANDLE HOOK ERRORS: use_traceback = False if", "attr): if attr[0] == \"_\": return attr in self.__dict__ for", "supplied path might be to a feature file if os.path.isfile(base_dir):", "context. Values may be deleted from the context using \"del\"", "and scenario. This attribute will not be present outside of", "_pop(self): \"\"\"Pop the current layer from the context stack. Performs", "\"\"\"Pop the current layer from the context stack. 
Performs any", "from the steps dir # NOTE: Default matcher can be", "attribute '%(attr)s' \" \\ \"originally set by behave\" elif self._config.verbose:", "that called this method uses .table/.text original_table = getattr(self, \"table\",", "{\"@cleanups\": []} if layer_name: initial_data[\"@layer\"] = layer_name self._stack.insert(0, initial_data) def", "elements. .. attribute:: aborted This is set to true when", "context layer (push/do-something/pop cycle). .. code-block:: with scoped_context_layer(context): the_fixture =", "set containing instances of :class:`~behave.model.Tag` which are basically just glorified", "import ExceptionUtil from behave.capture import CaptureController from behave.exception import ConfigError", "in filenames if fn.endswith(\".feature\")]: break else: if self.config.verbose: if not", "on_cleanup_error(context, cleanup_func, e) if self.fail_on_cleanup_errors and cleanup_errors: first_cleanup_erro_info = cleanup_errors[0]", "value. If the variable was originally set by *behave* then", "\"on_cleanup_error\", self.print_cleanup_error) context = self cleanup_errors = [] for cleanup_func", "not find any \"<name>.feature\" files '\\ 'in your specified path", "-- CASE: feature, scenario, step statement.hook_failed = True if statement.error_message:", "cleanups. assert self._stack, \"REQUIRE: Non-empty stack\" current_layer = self._stack[0] cleanup_funcs", "disable=protected-access, broad-except cleanups_failed = False self.run_hook(\"after_all\", self.context) try: self.context._do_cleanups() #", "stack. HINT: Use layer_name values: \"scenario\", \"feature\", \"testrun\". 
:param layer_name:", "context._mode = current_mode @contextlib.contextmanager def scoped_context_layer(context, layer_name=None): \"\"\"Provides context manager", "AFTER-ALL: # pylint: disable=protected-access, broad-except cleanups_failed = False self.run_hook(\"after_all\", self.context)", "+ os.path.sep # -- POSIX: return os.path.sep class ModelRunner(object): \"\"\"", "called. This is intended for user-cleanups. :param cleanup_func: Callable function", "Indicates \"user\" mode (in steps, hooks, fixtures, ...) \"\"\" BEHAVE", "\"\"\" This module provides Runner class to run behave feature", "coding: UTF-8 -*- \"\"\" This module provides Runner class to", "feature.run(self) if failed: failed_count += 1 if self.config.stop or self.aborted:", "Exception: cleanups_failed = True if self.aborted: print(\"\\nABORTED: By user.\") for", "True ExceptionUtil.set_traceback(e) extra = u\"\" if \"tag\" in name: extra", "self.base_dir = base_dir self.path_manager.add(base_dir) if not self.config.paths: self.config.paths = [base_dir]", "pass @staticmethod def print_cleanup_error(context, cleanup_func, exception): cleanup_func_name = getattr(cleanup_func, \"__name__\",", "msg % params warnings.warn(msg, ContextMaskWarning, stacklevel=3) def _dump(self, pretty=False, prefix=\"", "**kwargs): \"\"\"Adds a cleanup function that is called when :meth:`Context._pop()`", "message = \"%s SUB-STEP: %s\" % \\ (step.status.name.upper(), step_line) if", "testrun as FAILED # context._set_root_attribute(\"failed\", True) def _do_cleanups(self): \"\"\"Execute optional", "= False if self.config.verbose: use_traceback = True ExceptionUtil.set_traceback(e) extra =", "diagnose problem. step_line = u\"%s %s\" % (step.keyword, step.name) message", "import absolute_import, print_function, with_statement import contextlib import os.path import sys", "and holds a :class:`~behave.model.Scenario`. 
It will not be present outside", "features=None, step_registry=None): self.config = config self.features = features or []", "It is not present if stderr is not being captured.", "files '\\ 'in your specified path \"%s\"' % base_dir) raise", "for this layer. \"\"\" try: self._do_cleanups() finally: # -- ENSURE:", "disable=no-self-use context.config.setup_logging() def load_hooks(self, filename=None): filename = filename or self.config.environment_file", "from the context using \"del\" but only at the level", "PYTHON3 BACKPORT: With unicode traceback support. import traceback2 as traceback", "not being captured. .. attribute:: stdout_capture If stdout capture is", "on the context, for example: \"feature\" in context checks whether", "failed_count += 1 if self.config.stop or self.aborted: # -- FAIL-EARLY:", "frame in self._stack: if attr in frame: return True return", "self.aborted = True failed_count += 1 run_feature = False #", "Context(self) self.load_hooks() self.load_step_definitions() # -- ENSURE: context.execute_steps() works in weird", "user_mode(self): warnings.warn(\"Use 'use_with_user_mode()' instead\", PendingDeprecationWarning, stacklevel=2) return self.use_with_user_mode() def _set_root_attribute(self,", "for what this means\" if msg: msg = msg %", "is masking context attribute '%(attr)s' \" \\ \"originally set in", "self.config.verbose: print(\"Supplied path:\", \\ \", \".join('\"%s\"' % path for path", "= u\"\" if \"tag\" in name: extra = \"(tag=%s)\" %", "self.config.environment_file hooks_path = os.path.join(self.base_dir, filename) if os.path.exists(hooks_path): exec_file(hooks_path, self.hooks) if", "cleanup_func, e) if self.fail_on_cleanup_errors and cleanup_errors: first_cleanup_erro_info = cleanup_errors[0] del", "is being overwritten in some situations. 
If the variable was", "True def add_cleanup(self, cleanup_func, *args, **kwargs): \"\"\"Adds a cleanup function", "disable=too-many-instance-attributes def __init__(self, config, features=None, step_registry=None): self.config = config self.features", "else: # -- CASE: feature, scenario, step statement = args[0]", "rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir == r\"D:\\\" \"\"\" drive, _", "to keep track of untested features. for reporter in self.config.reporters:", "same as the `configuration file section names`_. .. attribute:: active_outline", "CaptureController(config) self.context = None self.feature = None self.hook_failures = 0", "msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __setattr__(self, attr, value): if attr[0]", "# XXX-MAYBE: or context.failed) return failed def run(self): \"\"\" Implements", "may be useful otherwise. .. attribute:: log_capture If logging capture", "[] self.step_registry = step_registry self.capture_controller = CaptureController(config) self.context = None", "os.path.abspath(\"features\") # Get the root. This is not guaranteed to", "of a scenario. .. attribute:: tags The current set of", "if self.config.verbose: if not self.config.paths: print('ERROR: Could not find \"%s\"", "run(self): with self.path_manager: self.setup_paths() return self.run_with_paths() def run_with_paths(self): self.context =", "cleanup_func: Callable function :param args: Args for cleanup_func() call (optional).", "msg = \"'{0}' object has no attribute '{1}' at the", "ENTITY REFERENCES/SUPPORT: self.feature = None # DISABLED: self.rule = None", "the `configuration file section names`_. .. attribute:: active_outline This is", "between the two usage modes while using the context: *", "self._mode def __delattr__(self, attr): frame = self._stack[0] if attr in", "frame[name] print(\"%s %-15s = %r\" % (prefix, name, value)) else:", ".. 
attribute:: config The configuration of *behave* as determined by", "\"function\": record[3], } self._emit_warning(attr, params) self.__dict__[\"_root\"][attr] = value if attr", "CleanupError(RuntimeError): pass class ContextMaskWarning(UserWarning): \"\"\"Raised if a context variable is", "code-block:: with scoped_context_layer(context): the_fixture = use_fixture(foo, context, name=\"foo_42\") \"\"\" #", "(len(self.undefined_steps) > undefined_steps_initial_size) or cleanups_failed) # XXX-MAYBE: or context.failed) return", "the two context modes. .. sourcecode:: python context = Context()", "with context.use_with_user_mode(): self.hooks[name](context, *args) # except KeyboardInterrupt: # self.aborted =", "from behave.step_registry import registry as the_step_registry from enum import Enum", "extra = u\"\" if \"tag\" in name: extra = \"(tag=%s)\"", "else: import traceback class CleanupError(RuntimeError): pass class ContextMaskWarning(UserWarning): \"\"\"Raised if", "= self.feature_locations() if file_locations: base_dir = os.path.dirname(file_locations[0].filename) base_dir = os.path.abspath(base_dir)", "USE: behave @features.txt base_dir = base_dir[1:] file_locations = self.feature_locations() if", "continue if attr in frame: record = self.__dict__[\"_record\"][attr] params =", "true in the root namespace as soon as a step", "files (or model elements). 
\"\"\" from __future__ import absolute_import, print_function,", "-*- \"\"\" This module provides Runner class to run behave", "BEHAVE: Indicates \"behave\" (internal) mode * USER: Indicates \"user\" mode", "= frame[name] print(\"%s %-15s = %r\" % (prefix, name, value))", "the_step_registry if features is None: features = self.features # --", "for fn in filenames if fn.endswith(\".feature\")]: break else: if self.config.verbose:", "Could not find any \"<name>.feature\" files '\\ 'in your specified", "% self.context self.context._set_root_attribute(\"aborted\", bool(value)) aborted = property(_get_aborted, _set_aborted, doc=\"Indicates that", "stdout is not being captured. .. attribute:: stderr_capture If stderr", "mode. Provides a context manager for switching between the two", "this attribute contains the captured output as a StringIO instance.", "run_feature: try: self.feature = feature for formatter in self.formatters: formatter.uri(feature.filename)", "if attr not in self._origin: self._origin[attr] = self._mode def __delattr__(self,", "current step. # Needed if step definition that called this", "before_all(context): context.on_cleanup_error = handle_cleanup_error context.add_cleanup(cleanup_database, the_database) \"\"\" # -- BEST-EFFORT", "situations. If the variable was originally set by user code", "False def execute_steps(self, steps_text): \"\"\"The steps identified in the \"steps\"", "user aborts a test run (:exc:`KeyboardInterrupt` exception). Initially: False. ..", "model. \"\"\" self.context = Context(self) return self.run_model() class Runner(ModelRunner): \"\"\"", "scenario. .. 
attribute:: tags The current set of active tags", "not ContextMode.USER: msg = \"user code is masking context attribute", "exec_file, load_step_modules, PathManager from behave.step_registry import registry as the_step_registry from", "internal_cleanup_func = cleanup_func current_frame = self._stack[0] if cleanup_func not in", "True def __init__(self, runner): self._runner = weakref.proxy(runner) self._config = runner.config", "= Context() with use_context_with_mode(context, ContextMode.BEHAVE): ... # Do something #", "attr, value): if attr[0] == \"_\": self.__dict__[attr] = value return", "% \\ (step.status.name.upper(), step_line) if step.error_message: message += \"\\nSubstep info:", "= stack_frame frame = self._stack[0] frame[attr] = value if attr", "feature files, parses them and creates model (elements) \"\"\" def", "self.load_hooks() self.load_step_definitions() # -- ENSURE: context.execute_steps() works in weird cases", "attr in frame: return True return False def execute_steps(self, steps_text):", "Even if an AssertionError/Exception is raised. context._mode = current_mode @contextlib.contextmanager", "statement.error_message += u\"\\n\"+ error_message else: # -- FIRST EXCEPTION/FAILURE: statement.store_exception_context(e)", "where to find your features.' % \\ steps_dir) else: print('ERROR:", "def _pop(self): \"\"\"Pop the current layer from the context stack.", "is None: features = self.features # -- ENSURE: context.execute_steps() works", "self.stderr_capture = None self.log_capture = None self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS @staticmethod", "cleanup_func() call (optional). \"\"\" # MAYBE: assert callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\"", ":param args: Args for cleanup_func() call (optional). 
:param kwargs: Kwargs", "None self.log_capture = None self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS @staticmethod def ignore_cleanup_error(context,", "be wary of using them yourself as *behave* may overwrite", "function that is called when :meth:`Context._pop()` is called. This is", "return value # @aborted.setter def _set_aborted(self, value): # pylint: disable=protected-access", "params) self.__dict__[\"_root\"][attr] = value if attr not in self._origin: self._origin[attr]", "= self._mode def _emit_warning(self, attr, params): msg = \"\" if", "= False except KeyboardInterrupt: self.aborted = True failed_count += 1", "# -- MODEL ENTITY REFERENCES/SUPPORT: self.feature = None # DISABLED:", "value if attr not in self._origin: self._origin[attr] = self._mode def", "context to ContextMode.BEHAVE or ContextMode.USER mode. Provides a context manager", "Release other exception frames. six.reraise(*first_cleanup_erro_info) def _push(self, layer_name=None): \"\"\"Push a", "ContextMode.BEHAVE: msg = \"behave runner is masking context attribute '%(attr)s'", "\"feature\" value in the context. Values may be deleted from", "= path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir == r\"D:\\\" \"\"\" drive, _ =", "exec_file(hooks_path, self.hooks) if \"before_all\" not in self.hooks: self.hooks[\"before_all\"] = self.before_all_default_hook", "# -- NOTE: One exception/failure is already stored. # Append", "sourcecode:: python context = Context() with use_context_with_mode(context, ContextMode.BEHAVE): ... 
#", "will have additional layers of namespace added and removed automatically.", "print(\"Trying base directory:\", new_base_dir) if os.path.isdir(os.path.join(new_base_dir, steps_dir)): break if os.path.isfile(os.path.join(new_base_dir,", "self.context) try: self.context._do_cleanups() # Without dropping the last context layer.", "not find \"%s\" directory in your '\\ 'specified path \"%s\"'", "Needed if step definition that called this method uses .table/.text", "= {} self._origin = {} self._mode = ContextMode.BEHAVE # --", "and cleanup_errors: first_cleanup_erro_info = cleanup_errors[0] del cleanup_errors # -- ENSURE:", "cleanup_func not in current_frame[\"@cleanups\"]: # -- AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager", "in enumerate(self._stack): print(\"%sLevel %d\" % (prefix, level)) if pretty: for", "# Even if an AssertionError/Exception is raised. context._mode = current_mode", "set by *behave* then this will be raised if user", "REQUIRED-FOR: Summary to keep track of untested features. for reporter", "assert rootdir == \"/\" WINDOWS-PATH EXAMPLE: rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert", "parse_features(feature_locations, language=self.config.lang) self.features.extend(features) # -- STEP: Run all features. stream_openers", "are set. You can't delete a value set by a", "may use the \"in\" operator to test whether a certain", "this will be raised if user code overwrites the value.", "of using them yourself as *behave* may overwrite the value", "the model. 
\"\"\" self.context = Context(self) return self.run_model() class Runner(ModelRunner):", "outside of feature\") # -- PREPARE: Save original context data", "self.hooks): try: with context.use_with_user_mode(): self.hooks[name](context, *args) # except KeyboardInterrupt: #", "base_dir = first_path if base_dir.startswith(\"@\"): # -- USE: behave @features.txt", "file so using its directory\") base_dir = os.path.dirname(base_dir) else: if", "run is aborted by the user.\") def run_hook(self, name, context,", "in self.feature_locations() if not self.config.exclude(filename)] features = parse_features(feature_locations, language=self.config.lang) self.features.extend(features)", "ContextMode(Enum): \"\"\"Used to distinguish between the two usage modes while", "executed in turn just as though they were defined in", "derived attribute in :attr:`Context.aborted`. \"\"\" # pylint: disable=too-many-instance-attributes def __init__(self,", "drive: # -- WINDOWS: return drive + os.path.sep # --", "if fn.endswith(\".feature\")]: break else: if self.config.verbose: if not self.config.paths: print('ERROR:", "a :class:`~behave.model.Scenario`. It will not be present outside of the", "if not self.context: self.context = Context(self) self.capture_controller.setup_capture(self.context) def start_capture(self): self.capture_controller.start_capture()", "filenames if fn.endswith(\".feature\")]: break else: if self.config.verbose: if not self.config.paths:", "for current step. # Needed if step definition that called", "this layer. 
\"\"\" try: self._do_cleanups() finally: # -- ENSURE: Layer", "(internal) mode * USER: Indicates \"user\" mode (in steps, hooks,", "A user can add a user-specified handler for cleanup errors.", "if step.error_message: message += \"\\nSubstep info: %s\\n\" % step.error_message message", "self.log_capture = None self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS @staticmethod def ignore_cleanup_error(context, cleanup_func,", "in (ContextMode.BEHAVE, ContextMode.USER) current_mode = context._mode try: context._mode = mode", "'\\ 'Please specify where to find your features.' % \\", "return use_context_with_mode(self, ContextMode.BEHAVE) def use_with_user_mode(self): \"\"\"Provides a context manager for", "self.config = config self.features = features or [] self.hooks =", "name not in (\"before_all\", \"after_all\"): # raise except Exception as", "* BEHAVE: Indicates \"behave\" (internal) mode * USER: Indicates \"user\"", "handle_cleanup_error(context, cleanup_func, exception): pass def before_all(context): context.on_cleanup_error = handle_cleanup_error context.add_cleanup(cleanup_database,", "The current set of active tags (as a Python set", "\"\"\" # MAYBE: assert callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\" assert self._stack if", "a cleanup function that is called when :meth:`Context._pop()` is called.", "[base_dir] if base_dir != os.getcwd(): self.path_manager.add(os.getcwd()) def before_all_default_hook(self, context): \"\"\"", "This is set when we start testing a new scenario", "in self._stack: if attr in frame: return frame[attr] msg =", "as traceback else: import traceback class CleanupError(RuntimeError): pass class ContextMaskWarning(UserWarning):", "contains the captured logging as an instance of :class:`~behave.log_capture.LoggingCapture`. 
It", "# -- ABORT EXECUTION: For before_all/after_all self.aborted = True statement", "# -- ENSURE: Layer is removed even if cleanup-errors occur.", "context attribute \" \\ \"'%(attr)s'; see the tutorial for what", "-- SCENARIO or FEATURE statement = getattr(context, \"scenario\", context.feature) elif", "and executed in turn just as though they were defined", "return collect_feature_locations(self.config.paths) def run(self): with self.path_manager: self.setup_paths() return self.run_with_paths() def", "if os.path.isfile(base_dir): if self.config.verbose: print(\"Primary path is to a file", "and holds a :class:`~behave.model.Feature`. It will not be present outside", "else: msg = \"'{0}' object has no attribute '{1}' at", "False, message # -- FINALLY: Restore original context data for", "'use_with_user_mode()' instead\", PendingDeprecationWarning, stacklevel=2) return self.use_with_user_mode() def _set_root_attribute(self, attr, value):", "self.undefined_steps = [] self.step_registry = step_registry self.capture_controller = CaptureController(config) self.context", "attr[0] == \"_\": self.__dict__[attr] = value return for frame in", "assert mode in (ContextMode.BEHAVE, ContextMode.USER) current_mode = context._mode try: context._mode", "cleanup_func_name: cleanup_func_name = \"%r\" % cleanup_func print(u\"CLEANUP-ERROR in %s: %s:", "self._stack, \"REQUIRE: Non-empty stack\" current_layer = self._stack[0] cleanup_funcs = current_layer.get(\"@cleanups\",", "% (step.keyword, step.name) message = \"%s SUB-STEP: %s\" % \\", "False. .. attribute:: table This is set at the step", "error_message = u\"HOOK-ERROR in %s%s: %s\" % (name, extra, error_text)", "Extract rootdir from path in a platform independent way. POSIX-PATH", "error message. 
statement.error_message += u\"\\n\"+ error_message else: # -- FIRST", "import os.path import sys import warnings import weakref import six", "set by user code then this will be raised if", "= {} self._mode = ContextMode.BEHAVE # -- MODEL ENTITY REFERENCES/SUPPORT:", "Context(self) return self.run_model() class Runner(ModelRunner): \"\"\" Standard test runner for", "scenario. It is present mostly for debugging, but may be", "True # if name not in (\"before_all\", \"after_all\"): # raise", "multiline text associated with the step. .. attribute:: config The", "captured. A :class:`behave.runner.ContextMaskWarning` warning will be raised if user code", "= self._stack[0] if attr in frame: del frame[attr] del self._record[attr]", "code-block:: python # -- FILE: features/environment.py def cleanup_database(database): pass def", "base_dir for dirpath, dirnames, filenames in os.walk(base_dir, followlinks=True): if [fn", "Layer name to use (or None). \"\"\" initial_data = {\"@cleanups\":", "problem. step_line = u\"%s %s\" % (step.keyword, step.name) message =", "During the running of your tests the object will have", "# pylint: disable=no-self-use context.config.setup_logging() def load_hooks(self, filename=None): filename = filename", "# raise except Exception as e: # pylint: disable=broad-except #", "None # DISABLED: self.rule = None # DISABLED: self.scenario =", "original_table = getattr(self, \"table\", None) original_text = getattr(self, \"text\", None)", "\"aborted\": False, \"failed\": False, \"config\": self._config, \"active_outline\": None, \"cleanup_errors\": 0,", "was originally set by user code then this will be", "return failed def run(self): \"\"\" Implements the run method by", "% cleanup_func print(u\"CLEANUP-ERROR in %s: %s: %s\" % (cleanup_func_name, exception.__class__.__name__,", "kwargs: Kwargs for cleanup_func() call (optional). 
\"\"\" # MAYBE: assert", "For before_all/after_all self.aborted = True statement = None else: #", "a \"root\" namespace and additional namespaces for features and scenarios.", "0) or (len(self.undefined_steps) > undefined_steps_initial_size) or cleanups_failed) # XXX-MAYBE: or", "load_step_modules, PathManager from behave.step_registry import registry as the_step_registry from enum", "stack\" current_layer = self._stack[0] cleanup_funcs = current_layer.get(\"@cleanups\", []) on_cleanup_error =", "current layer from the context stack. Performs any pending cleanups,", "from the context stack. Performs any pending cleanups, registered for", "try: with context.use_with_user_mode(): self.hooks[name](context, *args) # except KeyboardInterrupt: # self.aborted", "-- STEP: Parse all feature files (by using their file", "self.config.paths: if self.config.verbose: print(\"Supplied path:\", \\ \", \".join('\"%s\"' % path", "ALWAYS: Report run/not-run feature to reporters. # REQUIRED-FOR: Summary to", "None) self.feature.parser.variant = \"steps\" steps = self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode(): for", "is not present if stderr is not being captured. A", "enumerate(self._stack): print(\"%sLevel %d\" % (prefix, level)) if pretty: for name", "feature files (by using their file location). feature_locations = [filename", "use_traceback = True ExceptionUtil.set_traceback(e) extra = u\"\" if \"tag\" in", "stuff from the steps dir # NOTE: Default matcher can", "*behave* as determined by configuration files and command-line options. The", "\\ \"originally set in %(function)s (%(filename)s:%(line)s)\" elif self._mode is ContextMode.USER:", "a test run (:exc:`KeyboardInterrupt` exception). Initially: False. 
Stored as derived", "ExceptionUtil.describe(e, use_traceback).rstrip() error_message = u\"HOOK-ERROR in %s%s: %s\" % (name,", "r\"D:\\\" \"\"\" drive, _ = os.path.splitdrive(path) if drive: # --", "user can add a user-specified handler for cleanup errors. ..", "files. '\\ 'Please specify where to find your features.') else:", "attribute:: stdout_capture If stdout capture is enabled then this attribute", "\"\"\" pass class ContextMode(Enum): \"\"\"Used to distinguish between the two", "name to use (or None). \"\"\" initial_data = {\"@cleanups\": []}", "filename or self.config.environment_file hooks_path = os.path.join(self.base_dir, filename) if os.path.exists(hooks_path): exec_file(hooks_path,", "self._config.verbose: msg = \"user code is masking context attribute \"", "API needed by model elements. .. attribute:: aborted This is", "def __setattr__(self, attr, value): if attr[0] == \"_\": self.__dict__[attr] =", "None: self.step_registry = the_step_registry if features is None: features =", "params): msg = \"\" if self._mode is ContextMode.BEHAVE and self._origin[attr]", "masking context attribute '%(attr)s' \" \\ \"originally set by behave\"", "# Needed if step definition that called this method uses", "instance. It is not present if stderr is not being", "*behave* itself tries to overwrite a user-set variable. You may", "the level they are set. You can't delete a value", "run(self): \"\"\" Implements the run method by running the model.", "for a behave model (features). Provides the core functionality of", "steps to execute (as string). :returns: True, if the steps", "mode.\"\"\" return use_context_with_mode(self, ContextMode.USER) def user_mode(self): warnings.warn(\"Use 'use_with_user_mode()' instead\", PendingDeprecationWarning,", "print_function, with_statement import contextlib import os.path import sys import warnings", "layer (push/do-something/pop cycle). .. 
code-block:: with scoped_context_layer(context): the_fixture = use_fixture(foo,", "invoked without a feature context. \"\"\" assert isinstance(steps_text, six.text_type), \"Steps", ".. code-block:: python # -- FILE: features/environment.py def cleanup_database(database): pass", ":class:`~behave.model.Row` that is active for the current scenario. It is", "Gherkin steps to execute (as string). :returns: True, if the", "stack_limit += 1 # Due to traceback2 usage. stack_frame =", "has no attribute '{1}'\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg)", "parses them and creates model (elements) \"\"\" def __init__(self, config):", "return frame[attr] msg = \"'{0}' object has no attribute '{1}'\"", "names`_. .. attribute:: active_outline This is set for each scenario", "value set by a feature at a scenario level but", "\"before_all\" not in self.hooks: self.hooks[\"before_all\"] = self.before_all_default_hook def load_step_definitions(self, extra_step_paths=None):", "print(error_message) self.hook_failures += 1 if \"tag\" in name: # --", "os.path.isdir(os.path.join(new_base_dir, steps_dir)): break if os.path.isfile(os.path.join(new_base_dir, environment_file)): break if new_base_dir ==", "in the root namespace when the user aborts a test", "is set at the step level and holds any multiline", "the tutorial for what this means\" if msg: msg =", "and after_all). .. attribute:: scenario This is set when we", "use (or None). \"\"\" initial_data = {\"@cleanups\": []} if layer_name:", "e: # pylint: disable=broad-except # -- HANDLE HOOK ERRORS: use_traceback", "python context = Context() with use_context_with_mode(context, ContextMode.BEHAVE): ... 
# Do", "This attribute will not be present outside of a feature", "first_path = self.config.paths[0] if hasattr(first_path, \"filename\"): # -- BETTER: isinstance(first_path,", "self._stack[0] if attr in frame: del frame[attr] del self._record[attr] else:", "behave.formatter._registry import make_formatters from behave.runner_util import \\ collect_feature_locations, parse_features, \\", "def ignore_cleanup_error(context, cleanup_func, exception): pass @staticmethod def print_cleanup_error(context, cleanup_func, exception):", "of tests. This object is a place to store information", "class Context(object): \"\"\"Hold contextual information during the running of tests.", "implementation for :func:`before_all()` hook. Setup the logging subsystem based on", "table This is set at the step level and holds", "= False self.run_hook(\"after_all\", self.context) try: self.context._do_cleanups() # Without dropping the", "enabled then this attribute contains the captured logging as an", "self._stack = [d] self._record = {} self._origin = {} self._mode", "a feature (i.e. 
within the scope of the environment before_all", "= ContextMode.BEHAVE # -- MODEL ENTITY REFERENCES/SUPPORT: self.feature = None", "(cleanup_func_name, exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout) # MAYBE: context._dump(pretty=True, prefix=\"Context: \") #", "BEHAVE = 1 USER = 2 class Context(object): \"\"\"Hold contextual", "= value if attr not in self._origin: self._origin[attr] = self._mode", "0 # @property def _get_aborted(self): value = False if self.context:", "self._origin[attr] = self._mode def __delattr__(self, attr): frame = self._stack[0] if", "\"\"\" # pylint: disable=too-many-instance-attributes def __init__(self, config, features=None, step_registry=None): self.config", "(name, extra, error_text) print(error_message) self.hook_failures += 1 if \"tag\" in", "in BEHAVE mode.\"\"\" return use_context_with_mode(self, ContextMode.BEHAVE) def use_with_user_mode(self): \"\"\"Provides a", "attr): frame = self._stack[0] if attr in frame: del frame[attr]", "is active for the current scenario. It is present mostly", "wary of using them yourself as *behave* may overwrite the", "Layer is removed even if cleanup-errors occur. self._stack.pop(0) def _use_with_behave_mode(self):", "os.path.join(self.base_dir, self.config.steps_dir) step_paths = [steps_dir] + list(extra_step_paths) load_step_modules(step_paths) def feature_locations(self):", "not self.context: self.context = Context(self) if self.step_registry is None: self.step_registry", "cleanup_func() except Exception as e: # pylint: disable=broad-except # pylint:", "layer. \"\"\" try: self._do_cleanups() finally: # -- ENSURE: Layer is", "cleanup errors. .. 
code-block:: python # -- FILE: features/environment.py def", "using them yourself as *behave* may overwrite the value you", "= self._record[attr] params = { \"attr\": attr, \"filename\": record[0], \"line\":", "self._stack[0] cleanup_funcs = current_layer.get(\"@cleanups\", []) on_cleanup_error = getattr(self, \"on_cleanup_error\", self.print_cleanup_error)", "set in %(function)s (%(filename)s:%(line)s)\" elif self._mode is ContextMode.USER: if self._origin[attr]", "= new_base_dir self.config.base_dir = base_dir for dirpath, dirnames, filenames in", "break new_base_dir = os.path.dirname(new_base_dir) if new_base_dir == root_dir: if self.config.verbose:", "to it of whatever value you need. During the running", ".. attribute:: aborted This is set to true when the", "behave.exception import ConfigError from behave.formatter._registry import make_formatters from behave.runner_util import", "value. \"\"\" pass class ContextMode(Enum): \"\"\"Used to distinguish between the", "independent way. POSIX-PATH EXAMPLE: rootdir = path_getrootdir(\"/foo/bar/one.feature\") assert rootdir ==", "import CaptureController from behave.exception import ConfigError from behave.formatter._registry import make_formatters", "print(\"%sLevel %d\" % (prefix, level)) if pretty: for name in", "not passed: # -- ISSUE #96: Provide more substep info", "as the `configuration file section names`_. .. attribute:: active_outline This", "sorted(frame.keys()): value = frame[name] print(\"%s %-15s = %r\" % (prefix,", "rootdir == \"/\" WINDOWS-PATH EXAMPLE: rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir", "has no attribute '{1}' at the current level\" msg =", "use_with_user_mode(self): \"\"\"Provides a context manager for using the context in", "-- WINDOWS: return drive + os.path.sep # -- POSIX: return", "context checks whether there is a \"feature\" value in the", "= True if statement.error_message: # -- NOTE: One exception/failure is", "weird cases (hooks, ...) 
context = self.context self.hook_failures = 0", "% (steps_dir, base_dir)) message = 'No %s directory in %r'", "extra = \"(tag=%s)\" % args[0] error_text = ExceptionUtil.describe(e, use_traceback).rstrip() error_message", ".. attribute:: tags The current set of active tags (as", "self._record[attr] params = { \"attr\": attr, \"filename\": record[0], \"line\": record[1],", "text This is set at the step level and holds", "This is set for each scenario in a scenario outline", "if cleanup-errors occur. self._stack.pop(0) def _use_with_behave_mode(self): \"\"\"Provides a context manager", "cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func, e) if self.fail_on_cleanup_errors and cleanup_errors: first_cleanup_erro_info =", "\"\"\"Push a new layer on the context stack. HINT: Use", "names`: behave.html#configuration-files \"\"\" # pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS = True def", "attribute:: aborted This is set to true in the root", "% (prefix, name, value)) else: print(prefix + repr(frame)) def __getattr__(self,", "self.config.verbose: print(\"Trying base directory:\", new_base_dir) if os.path.isdir(os.path.join(new_base_dir, steps_dir)): break if", "is already stored. # Append only error message. statement.error_message +=", "pylint: disable=broad-except # pylint: disable=protected-access context._root[\"cleanup_errors\"] += 1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context,", "base_dir) self.base_dir = base_dir self.path_manager.add(base_dir) if not self.config.paths: self.config.paths =", "other stuff from the steps dir # NOTE: Default matcher", "message. statement.error_message += u\"\\n\"+ error_message else: # -- FIRST EXCEPTION/FAILURE:", "run_feature = False except KeyboardInterrupt: self.aborted = True failed_count +=", "test run (:exc:`KeyboardInterrupt` exception). Initially: False. Stored as derived attribute", "exception/failure is already stored. # Append only error message. 
statement.error_message", "start testing a new feature and holds a :class:`~behave.model.Feature`. It", "if self.config.stop or self.aborted: # -- FAIL-EARLY: After first failure.", "fails (either through error or failure assertion) then the step", "in frame: return frame[attr] msg = \"'{0}' object has no", "\"\"\" Extract rootdir from path in a platform independent way.", "use_context_with_mode(context, ContextMode.BEHAVE): ... # Do something # -- POSTCONDITION: Original", "self.config.paths: print('ERROR: Could not find any \"<name>.feature\" files. '\\ 'Please", "in %s: %s: %s\" % (cleanup_func_name, exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout) #", "with scoped_context_layer(context): the_fixture = use_fixture(foo, context, name=\"foo_42\") \"\"\" # pylint:", "by a feature at a scenario level but you can", "# -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)", "at a scenario level but you can delete a value", "# -- POSIX: return os.path.sep class ModelRunner(object): \"\"\" Test runner", "self.run_hook(\"before_all\", context) run_feature = not self.aborted failed_count = 0 undefined_steps_initial_size", "captured. .. attribute:: stderr_capture If stderr capture is enabled then", "{} self._mode = ContextMode.BEHAVE # -- MODEL ENTITY REFERENCES/SUPPORT: self.feature", "def cleanup_database(database): pass def handle_cleanup_error(context, cleanup_func, exception): pass def before_all(context):", "to import other stuff from the steps dir # NOTE:", "from __future__ import absolute_import, print_function, with_statement import contextlib import os.path", "determined by configuration files and command-line options. The attributes of", "\"./features\"') base_dir = os.path.abspath(\"features\") # Get the root. This is", "BEST-EFFORT ALGORITHM: Tries to perform all cleanups. 
assert self._stack, \"REQUIRE:", "masking context attribute '%(attr)s' \" \\ \"originally set in %(function)s", "if msg: msg = msg % params warnings.warn(msg, ContextMaskWarning, stacklevel=3)", "disable=protected-access assert mode in (ContextMode.BEHAVE, ContextMode.USER) current_mode = context._mode try:", "filenames in os.walk(base_dir, followlinks=True): if [fn for fn in filenames", "%s\" % (step.keyword, step.name) message = \"%s SUB-STEP: %s\" %", "frames. six.reraise(*first_cleanup_erro_info) def _push(self, layer_name=None): \"\"\"Push a new layer on", "modes. .. sourcecode:: python context = Context() with use_context_with_mode(context, ContextMode.BEHAVE):", "containing instances of :class:`~behave.model.Tag` which are basically just glorified strings)", "reporters. # REQUIRED-FOR: Summary to keep track of untested features.", "from behave.exception import ConfigError from behave.formatter._registry import make_formatters from behave.runner_util", "otherwise. .. attribute:: log_capture If logging capture is enabled then", "value you set. These names are: .. attribute:: feature This", "stderr_capture If stderr capture is enabled then this attribute contains", "{ \"attr\": attr, \"filename\": record[0], \"line\": record[1], \"function\": record[3], }", "\"table\", None) original_text = getattr(self, \"text\", None) self.feature.parser.variant = \"steps\"", "catch the resulting exceptions. :param steps_text: Text with the Gherkin", "Allow steps to import other stuff from the steps dir", "disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS = True def __init__(self, runner): self._runner = weakref.proxy(runner)", "(self.hook_failures > 0) or (len(self.undefined_steps) > undefined_steps_initial_size) or cleanups_failed) #", "# pylint: disable=protected-access try: context._push(layer_name) yield context finally: context._pop() def", "any multiline text associated with the step. .. attribute:: config", "Append only error message. 
statement.error_message += u\"\\n\"+ error_message else: #", "= 2 class Context(object): \"\"\"Hold contextual information during the running", "in frame: record = self._record[attr] params = { \"attr\": attr,", "base_dir != os.getcwd(): self.path_manager.add(os.getcwd()) def before_all_default_hook(self, context): \"\"\" Default implementation", "use_traceback).rstrip() error_message = u\"HOOK-ERROR in %s%s: %s\" % (name, extra,", "layer_name: Layer name to use (or None). \"\"\" initial_data =", "not be present outside of the scope of a scenario.", "load_step_modules(step_paths) def feature_locations(self): return collect_feature_locations(self.config.paths) def run(self): with self.path_manager: self.setup_paths()", "or failure assertion) then the step invoking it will need", "u\"\\nTraceback (of context.execute_steps()):\" assert False, message # -- FINALLY: Restore", "a new scenario (including the individual scenarios of a scenario", "\"\"\" Standard test runner for behave: * setup paths *", "features. stream_openers = self.config.outputs self.formatters = make_formatters(self.config, stream_openers) return self.run_model()", "del cleanup_errors # -- ENSURE: Release other exception frames. six.reraise(*first_cleanup_erro_info)", "cycle). .. code-block:: with scoped_context_layer(context): the_fixture = use_fixture(foo, context, name=\"foo_42\")", "or cleanups_failed) # XXX-MAYBE: or context.failed) return failed def run(self):", "fn.endswith(\".feature\")]: break else: if self.config.verbose: if not self.config.paths: print('ERROR: Could", "instance. It is not present if stdout is not being", "layers of namespace added and removed automatically. 
There is a", "HOOK ERRORS: use_traceback = False if self.config.verbose: use_traceback = True", "self.hooks = {} self.formatters = [] self.undefined_steps = [] self.step_registry", "cleanup_func, exception): pass @staticmethod def print_cleanup_error(context, cleanup_func, exception): cleanup_func_name =", "\"%s\" directory. '\\ 'Please specify where to find your features.'", "= self.config.steps_dir environment_file = self.config.environment_file while True: if self.config.verbose: print(\"Trying", "way. POSIX-PATH EXAMPLE: rootdir = path_getrootdir(\"/foo/bar/one.feature\") assert rootdir == \"/\"", "path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir == r\"D:\\\" \"\"\" drive, _ = os.path.splitdrive(path)", "of the scope of a scenario. .. attribute:: tags The", "# Due to traceback2 usage. stack_frame = traceback.extract_stack(limit=stack_limit)[0] self._record[attr] =", "self.config.verbose: print('Using default path \"./features\"') base_dir = os.path.abspath(\"features\") # Get", "user-specified handler for cleanup errors. .. 
code-block:: python # --", "stack_limit = 2 if six.PY2: stack_limit += 1 # Due", "delete a value set for a scenario in that scenario.", "USER = 2 class Context(object): \"\"\"Hold contextual information during the", "If stderr capture is enabled then this attribute contains the", "# -- CASE: feature, scenario, step statement = args[0] if", "self.formatters: formatter.close() for reporter in self.config.reporters: reporter.end() failed = ((failed_count", "reporter.end() failed = ((failed_count > 0) or self.aborted or (self.hook_failures", "not in self.hooks: self.hooks[\"before_all\"] = self.before_all_default_hook def load_step_definitions(self, extra_step_paths=None): if", "= weakref.proxy(runner) self._config = runner.config d = self._root = {", "(prefix, level)) if pretty: for name in sorted(frame.keys()): value =", "True if statement.error_message: # -- NOTE: One exception/failure is already", "os.path.abspath(base_dir) # supplied path might be to a feature file", "raise AttributeError(msg) def __contains__(self, attr): if attr[0] == \"_\": return", "pylint: disable=too-many-branches if not self.context: self.context = Context(self) if self.step_registry", "directory in your '\\ 'specified path \"%s\"' % (steps_dir, base_dir))", "overwritten in some situations. If the variable was originally set", "cleanup_errors[0] del cleanup_errors # -- ENSURE: Release other exception frames.", "level)) if pretty: for name in sorted(frame.keys()): value = frame[name]", "in self.config.reporters: reporter.feature(feature) # -- AFTER-ALL: # pylint: disable=protected-access, broad-except", "disable=protected-access try: context._push(layer_name) yield context finally: context._pop() def path_getrootdir(path): \"\"\"", "layer on the context stack. 
HINT: Use layer_name values: \"scenario\",", "set to true when the user aborts a test run", "teardown_capture(self): self.capture_controller.teardown_capture() def run_model(self, features=None): # pylint: disable=too-many-branches if not", "on_cleanup_error = getattr(self, \"on_cleanup_error\", self.print_cleanup_error) context = self cleanup_errors =", "by *behave*; be wary of using them yourself as *behave*", "= \"(tag=%s)\" % args[0] error_text = ExceptionUtil.describe(e, use_traceback).rstrip() error_message =", "manager for switching between the two context modes. .. sourcecode::", "platform independent way. POSIX-PATH EXAMPLE: rootdir = path_getrootdir(\"/foo/bar/one.feature\") assert rootdir", "# -- CASE: feature, scenario, step statement.hook_failed = True if", "\"\"\"Raised if a context variable is being overwritten in some", "in the \"steps\" text string will be parsed and executed", "config The configuration of *behave* as determined by configuration files", "the context: * BEHAVE: Indicates \"behave\" (internal) mode * USER:", "return True def add_cleanup(self, cleanup_func, *args, **kwargs): \"\"\"Adds a cleanup", "self.formatters = [] self.undefined_steps = [] self.step_registry = step_registry self.capture_controller", "raise ValueError(\"execute_steps() called outside of feature\") # -- PREPARE: Save", "object is a place to store information related to the", "stack_frame = traceback.extract_stack(limit=stack_limit)[0] self._record[attr] = stack_frame frame = self._stack[0] frame[attr]", "ABORT EXECUTION: For before_all/after_all self.aborted = True statement = None", "extra, error_text) print(error_message) self.hook_failures += 1 if \"tag\" in name:", "\"Steps must be unicode.\" if not self.feature: raise ValueError(\"execute_steps() called", "the value. \"\"\" pass class ContextMode(Enum): \"\"\"Used to distinguish between", "feature file. 
If the execute_steps call fails (either through error", "runner): self._runner = weakref.proxy(runner) self._config = runner.config d = self._root", "raise AttributeError(msg) def __setattr__(self, attr, value): if attr[0] == \"_\":", "= None def setup_paths(self): # pylint: disable=too-many-branches, too-many-statements if self.config.paths:", "1 run_feature = False # -- ALWAYS: Report run/not-run feature", "class to run behave feature files (or model elements). \"\"\"", "'%(attr)s' \" \\ \"originally set in %(function)s (%(filename)s:%(line)s)\" elif self._mode", "because Windows. root_dir = path_getrootdir(base_dir) new_base_dir = base_dir steps_dir =", "itself tries to overwrite a user-set variable. You may use", "all features. stream_openers = self.config.outputs self.formatters = make_formatters(self.config, stream_openers) return", "This is set at the step level and holds any", "feature\") # -- PREPARE: Save original context data for current", "-- ENSURE: context.execute_steps() works in weird cases (hooks, ...) 
context", "base_dir)) message = 'No %s directory in %r' % (steps_dir,", "cleanup_func, exception): pass def before_all(context): context.on_cleanup_error = handle_cleanup_error context.add_cleanup(cleanup_database, the_database)", "self.config.verbose: if not self.config.paths: print('ERROR: Could not find any \"<name>.feature\"", "# -- HANDLE HOOK ERRORS: use_traceback = False if self.config.verbose:", "\"\"\" initial_data = {\"@cleanups\": []} if layer_name: initial_data[\"@layer\"] = layer_name", "context._mode = mode yield finally: # -- RESTORE: Initial current_mode", "no attribute '{1}'\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def", "hooks * loads step definitions * select feature files, parses", "*args, **kwargs): \"\"\"Adds a cleanup function that is called when", "feature_locations(self): return collect_feature_locations(self.config.paths) def run(self): with self.path_manager: self.setup_paths() return self.run_with_paths()", "def _use_with_behave_mode(self): \"\"\"Provides a context manager for using the context", "outside of a feature (i.e. 
within the scope of the", "behave.html#configuration-files \"\"\" # pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS = True def __init__(self,", "A :class:`behave.runner.ContextMaskWarning` warning will be raised if user code attempts", "file section names`: behave.html#configuration-files \"\"\" # pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS =", "% params warnings.warn(msg, ContextMaskWarning, stacklevel=3) def _dump(self, pretty=False, prefix=\" \"):", "must be unicode.\" if not self.feature: raise ValueError(\"execute_steps() called outside", "if cleanup_func not in current_frame[\"@cleanups\"]: # -- AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func)", "if not self.context: self.context = Context(self) if self.step_registry is None:", "formatter in self.formatters: formatter.close() for reporter in self.config.reporters: reporter.end() failed", "+= u\"Traceback (of failed substep):\\n\" message += u\"\".join(traceback.format_tb(step.exc_traceback)) # message", "in self.hooks): try: with context.use_with_user_mode(): self.hooks[name](context, *args) # except KeyboardInterrupt:", "self.formatters: formatter.uri(feature.filename) failed = feature.run(self) if failed: failed_count += 1", "DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def use_context_with_mode(context, mode): \"\"\"Switch context to ContextMode.BEHAVE", ":param kwargs: Kwargs for cleanup_func() call (optional). \"\"\" # MAYBE:", "for switching between the two context modes. .. 
sourcecode:: python", "base_dir steps_dir = self.config.steps_dir environment_file = self.config.environment_file while True: if", "a context manager for switching between the two context modes.", "if attr not in self._origin: self._origin[attr] = self._mode def _emit_warning(self,", "self.config.steps_dir environment_file = self.config.environment_file while True: if self.config.verbose: print(\"Trying base", "import registry as the_step_registry from enum import Enum if six.PY2:", "level but you can delete a value set for a", "# -- FILE: features/environment.py def cleanup_database(database): pass def handle_cleanup_error(context, cleanup_func,", "# -- FAIL-EARLY: After first failure. run_feature = False except", "tests. This object is a place to store information related", "manager for using the context in BEHAVE mode.\"\"\" return use_context_with_mode(self,", "core functionality of a test runner and the functional API", "a Python set containing instances of :class:`~behave.model.Tag` which are basically", "we start testing a new feature and holds a :class:`~behave.model.Feature`.", "__delattr__(self, attr): frame = self._stack[0] if attr in frame: del", "True statement = None else: # -- CASE: feature, scenario,", "set on the context, for example: \"feature\" in context checks", "if drive: # -- WINDOWS: return drive + os.path.sep #", "we start testing a new scenario (including the individual scenarios", "original_text return True def add_cleanup(self, cleanup_func, *args, **kwargs): \"\"\"Adds a", "glorified strings) combined from the feature and scenario. This attribute", "= 2 if six.PY2: stack_limit += 1 # Due to", "def setup_paths(self): # pylint: disable=too-many-branches, too-many-statements if self.config.paths: if self.config.verbose:", "raised if user code overwrites the value. \"\"\" pass class", "-- ISSUE #96: Provide more substep info to diagnose problem.", "is not present if stdout is not being captured. 
..", "base_dir.startswith(\"@\"): # -- USE: behave @features.txt base_dir = base_dir[1:] file_locations", "_emit_warning(self, attr, params): msg = \"\" if self._mode is ContextMode.BEHAVE", "True if self.aborted: print(\"\\nABORTED: By user.\") for formatter in self.formatters:", "layer_name: initial_data[\"@layer\"] = layer_name self._stack.insert(0, initial_data) def _pop(self): \"\"\"Pop the", "frame: return True return False def execute_steps(self, steps_text): \"\"\"The steps", "finally: context._pop() def path_getrootdir(path): \"\"\" Extract rootdir from path in", "set of active tags (as a Python set containing instances", "the object will have additional layers of namespace added and", ":param context: Context object to use. :param mode: Mode to", "raise AttributeError(attr) for frame in self._stack: if attr in frame:", "during the running of tests. This object is a place", "features or [] self.hooks = {} self.formatters = [] self.undefined_steps", "\"\"\" BEHAVE = 1 USER = 2 class Context(object): \"\"\"Hold", "property(_get_aborted, _set_aborted, doc=\"Indicates that test run is aborted by the", "params warnings.warn(msg, ContextMaskWarning, stacklevel=3) def _dump(self, pretty=False, prefix=\" \"): for", "mode: Mode to apply to context object. \"\"\" # pylint:", "but you can delete a value set for a scenario", "'specified path \"%s\"' % (steps_dir, base_dir)) message = 'No %s", "run_with_paths(self): self.context = Context(self) self.load_hooks() self.load_step_definitions() # -- ENSURE: context.execute_steps()", "step. .. attribute:: text This is set at the step", "self.feature = feature for formatter in self.formatters: formatter.uri(feature.filename) failed =", "= getattr(self, \"table\", None) original_text = getattr(self, \"text\", None) self.feature.parser.variant", "be useful otherwise. .. 
attribute:: log_capture If logging capture is", "# pylint: disable=too-many-branches if not self.context: self.context = Context(self) if", "= [] self.step_registry = step_registry self.capture_controller = CaptureController(config) self.context =", "SCENARIO or FEATURE statement = getattr(context, \"scenario\", context.feature) elif \"all\"", "hooks_path = os.path.join(self.base_dir, filename) if os.path.exists(hooks_path): exec_file(hooks_path, self.hooks) if \"before_all\"", "set for a scenario in that scenario. .. _`configuration file", "popped. A user can add a user-specified handler for cleanup", "as e: # pylint: disable=broad-except # -- HANDLE HOOK ERRORS:", "last context layer. except Exception: cleanups_failed = True if self.aborted:", "in self._origin: self._origin[attr] = self._mode def __delattr__(self, attr): frame =", "not self.aborted failed_count = 0 undefined_steps_initial_size = len(self.undefined_steps) for feature", "original context data for current step. # Needed if step", "disable=protected-access assert self.context, \"REQUIRE: context, but context=%r\" % self.context self.context._set_root_attribute(\"aborted\",", "it of whatever value you need. 
During the running of", "It is not present if stdout is not being captured.", "assert self.context, \"REQUIRE: context, but context=%r\" % self.context self.context._set_root_attribute(\"aborted\", bool(value))", "path \"%s\"' % base_dir) raise ConfigError('No feature files in %r'", "self.text = None self.table = None # -- RUNTIME SUPPORT:", "= { \"attr\": attr, \"filename\": record[0], \"line\": record[1], \"function\": record[3],", "frame in self._stack: if attr in frame: return frame[attr] msg", "*behave*; be wary of using them yourself as *behave* may", "= \"%r\" % cleanup_func print(u\"CLEANUP-ERROR in %s: %s: %s\" %", "# -- RUNTIME SUPPORT: self.stdout_capture = None self.stderr_capture = None", "passed: # -- ISSUE #96: Provide more substep info to", "(including the individual scenarios of a scenario outline) and holds", "'\\ 'Please specify where to find your features.') else: print('ERROR:", "find any \"<name>.feature\" files '\\ 'in your specified path \"%s\"'", "# -- MARK: testrun as FAILED # context._set_root_attribute(\"failed\", True) def", "steps_text: Text with the Gherkin steps to execute (as string).", "\"'%(attr)s'; see the tutorial for what this means\" if msg:", "two usage modes while using the context: * BEHAVE: Indicates", "string). :returns: True, if the steps executed successfully. :raises: AssertionError,", "steps = self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode(): for step in steps: passed", "# DISABLED: self.rule = None # DISABLED: self.scenario = None", "instance of :class:`~behave.log_capture.LoggingCapture`. It is not present if logging is", "errors. .. 
code-block:: python # -- FILE: features/environment.py def cleanup_database(database):", "(prefix, name, value)) else: print(prefix + repr(frame)) def __getattr__(self, attr):", "traceback.extract_stack(limit=stack_limit)[0] self._record[attr] = stack_frame frame = self._stack[0] frame[attr] = value", "= self.context self.hook_failures = 0 self.setup_capture() self.run_hook(\"before_all\", context) run_feature =", "registered for this layer. \"\"\" try: self._do_cleanups() finally: # --", "import Enum if six.PY2: # -- USE PYTHON3 BACKPORT: With", "Kwargs for cleanup_func() call (optional). \"\"\" # MAYBE: assert callable(cleanup_func),", "if not self.config.paths: self.config.paths = [base_dir] if base_dir != os.getcwd():", "object will have additional layers of namespace added and removed", "value): for frame in self.__dict__[\"_stack\"]: if frame is self.__dict__[\"_root\"]: continue", "cleanup_func() call (optional). :param kwargs: Kwargs for cleanup_func() call (optional).", "associated with the step. .. attribute:: config The configuration of", "raised if *behave* overwrites the value. If the variable was", "Get the root. This is not guaranteed to be \"/\"", "step.error_message: message += \"\\nSubstep info: %s\\n\" % step.error_message message +=", "= self._root = { \"aborted\": False, \"failed\": False, \"config\": self._config,", "@contextlib.contextmanager def use_context_with_mode(context, mode): \"\"\"Switch context to ContextMode.BEHAVE or ContextMode.USER", "value return for frame in self._stack[1:]: if attr in frame:", "pylint: disable=protected-access context._root[\"cleanup_errors\"] += 1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func, e) if", "self._stack: if attr in frame: return frame[attr] msg = \"'{0}'", "run (:exc:`KeyboardInterrupt` exception). Initially: False. 
Stored as derived attribute in", "object has no attribute '{1}'\" msg = msg.format(self.__class__.__name__, attr) raise", "print(\"Supplied path:\", \\ \", \".join('\"%s\"' % path for path in", "_ = os.path.splitdrive(path) if drive: # -- WINDOWS: return drive", "# @property def _get_aborted(self): value = False if self.context: value", "%-15s = %r\" % (prefix, name, value)) else: print(prefix +", ".. attribute:: text This is set at the step level", "loads environment hooks * loads step definitions * select feature", "= PathManager() self.base_dir = None def setup_paths(self): # pylint: disable=too-many-branches,", "pylint: disable=no-self-use context.config.setup_logging() def load_hooks(self, filename=None): filename = filename or", "self.rule = None # DISABLED: self.scenario = None self.text =", "aborted = property(_get_aborted, _set_aborted, doc=\"Indicates that test run is aborted", "%r' % (steps_dir, base_dir) raise ConfigError(message) base_dir = new_base_dir self.config.base_dir", "runner is masking context attribute '%(attr)s' \" \\ \"originally set", "in (\"before_all\", \"after_all\"): # raise except Exception as e: #", "AssertionError/Exception is raised. context._mode = current_mode @contextlib.contextmanager def scoped_context_layer(context, layer_name=None):", "layer_name self._stack.insert(0, initial_data) def _pop(self): \"\"\"Pop the current layer from", "False self.run_hook(\"after_all\", self.context) try: self.context._do_cleanups() # Without dropping the last", "== root_dir: break new_base_dir = os.path.dirname(new_base_dir) if new_base_dir == root_dir:", "If the variable was originally set by user code then", "% base_dir) self.base_dir = base_dir self.path_manager.add(base_dir) if not self.config.paths: self.config.paths", "if a step failure occurs. 
:raises: ValueError, if invoked without", "self.stdout_capture = None self.stderr_capture = None self.log_capture = None self.fail_on_cleanup_errors", "u\"\".join(traceback.format_tb(step.exc_traceback)) # message += u\"\\nTraceback (of context.execute_steps()):\" assert False, message", "the individual scenarios of a scenario outline) and holds a", "though they were defined in a feature file. If the", "= %r\" % (prefix, name, value)) else: print(prefix + repr(frame))", "% path for path in self.config.paths)) first_path = self.config.paths[0] if", "else: print('ERROR: Could not find \"%s\" directory in your '\\", "the scope of the environment before_all and after_all). .. attribute::", "ContextMode.BEHAVE) def use_with_user_mode(self): \"\"\"Provides a context manager for using the", "context._root[\"cleanup_errors\"] += 1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func, e) if self.fail_on_cleanup_errors and", "directory in %r' % (steps_dir, base_dir) raise ConfigError(message) base_dir =", "on the configuration data. \"\"\" # pylint: disable=no-self-use context.config.setup_logging() def", "% \\ steps_dir) else: print('ERROR: Could not find \"%s\" directory", "= current_layer.get(\"@cleanups\", []) on_cleanup_error = getattr(self, \"on_cleanup_error\", self.print_cleanup_error) context =", "be raised if user code attempts to overwrite one of", "code attempts to overwrite one of these variables, or if", "def __getattr__(self, attr): if attr[0] == \"_\": try: return self.__dict__[attr]", "scope of the environment before_all and after_all). .. attribute:: scenario", "# Get the root. 
This is not guaranteed to be", "self.aborted: print(\"\\nABORTED: By user.\") for formatter in self.formatters: formatter.close() for", "getattr(self, \"text\", None) self.feature.parser.variant = \"steps\" steps = self.feature.parser.parse_steps(steps_text) with", "record[3], } self._emit_warning(attr, params) self.__dict__[\"_root\"][attr] = value if attr not", "stack frame is popped. A user can add a user-specified", "may add arbitrary attributes to it of whatever value you", "if self.config.verbose: print(\"Supplied path:\", \\ \", \".join('\"%s\"' % path for", "msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __setattr__(self, attr, value):", "the current level\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def", "\" \\ \"'%(attr)s'; see the tutorial for what this means\"", "is not ContextMode.BEHAVE: msg = \"behave runner is masking context", "Runner(ModelRunner): \"\"\" Standard test runner for behave: * setup paths", "os.path.dirname(file_locations[0].filename) base_dir = os.path.abspath(base_dir) # supplied path might be to", "# -- WINDOWS: return drive + os.path.sep # -- POSIX:", "assert self._stack if args or kwargs: def internal_cleanup_func(): cleanup_func(*args, **kwargs)", "# pylint: disable=broad-except # -- HANDLE HOOK ERRORS: use_traceback =", "FILE: features/environment.py def cleanup_database(database): pass def handle_cleanup_error(context, cleanup_func, exception): pass", "setup_paths(self): # pylint: disable=too-many-branches, too-many-statements if self.config.paths: if self.config.verbose: print(\"Supplied", "> undefined_steps_initial_size) or cleanups_failed) # XXX-MAYBE: or context.failed) return failed", "raise ConfigError('No feature files in %r' % base_dir) self.base_dir =", "be unicode.\" if not self.feature: raise ValueError(\"execute_steps() called outside of", "statement.hook_failed = True if statement.error_message: # -- NOTE: One exception/failure", "may overwrite the 
value you set. These names are: ..", "being captured. A :class:`behave.runner.ContextMaskWarning` warning will be raised if user", "output as a StringIO instance. It is not present if", "new_base_dir self.config.base_dir = base_dir for dirpath, dirnames, filenames in os.walk(base_dir,", "to find your features.') else: print('ERROR: Could not find any", "# -- Allow steps to import other stuff from the", "Args for cleanup_func() call (optional). :param kwargs: Kwargs for cleanup_func()", "to test whether a certain value has been set on", "a user-specified handler for cleanup errors. .. code-block:: python #", "\"\"\" drive, _ = os.path.splitdrive(path) if drive: # -- WINDOWS:", "= self.before_all_default_hook def load_step_definitions(self, extra_step_paths=None): if extra_step_paths is None: extra_step_paths", "then this will be raised if user code overwrites the", "not self.config.paths: print('ERROR: Could not find any \"<name>.feature\" files. '\\", "args or kwargs: def internal_cleanup_func(): cleanup_func(*args, **kwargs) else: internal_cleanup_func =", "features.' % \\ steps_dir) else: print('ERROR: Could not find \"%s\"", "turn just as though they were defined in a feature", "False. .. attribute:: failed This is set to true in", "a scenario. .. attribute:: tags The current set of active", "= None self.text = None self.table = None # --", "of untested features. for reporter in self.config.reporters: reporter.feature(feature) # --", "* setup paths * loads environment hooks * loads step", "string will be parsed and executed in turn just as", "if name not in (\"before_all\", \"after_all\"): # raise except Exception", "The configuration of *behave* as determined by configuration files and", "called this method uses .table/.text original_table = getattr(self, \"table\", None)", "pass class ContextMaskWarning(UserWarning): \"\"\"Raised if a context variable is being", "context.execute_steps() works in weird cases (hooks, ...) context = self.context", "occurs. 
:raises: ValueError, if invoked without a feature context. \"\"\"", "find your features.' % \\ steps_dir) else: print('ERROR: Could not", "has been set on the context, for example: \"feature\" in", "contextlib import os.path import sys import warnings import weakref import", "exception). Initially: False. .. attribute:: failed This is set to", "if os.path.exists(hooks_path): exec_file(hooks_path, self.hooks) if \"before_all\" not in self.hooks: self.hooks[\"before_all\"]", "= CaptureController(config) self.context = None self.feature = None self.hook_failures =", "features = self.features # -- ENSURE: context.execute_steps() works in weird", "True, if the steps executed successfully. :raises: AssertionError, if a", "Indicates \"behave\" (internal) mode * USER: Indicates \"user\" mode (in", "feature, scenario, step statement = args[0] if statement: # --", "EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message = error_message def setup_capture(self): if not self.context:", "if self.fail_on_cleanup_errors and cleanup_errors: first_cleanup_erro_info = cleanup_errors[0] del cleanup_errors #", "layer_name=None): \"\"\"Provides context manager for context layer (push/do-something/pop cycle). ..", "(as string). :returns: True, if the steps executed successfully. :raises:", "for features and scenarios. Certain names are used by *behave*;", "if not cleanup_func_name: cleanup_func_name = \"%r\" % cleanup_func print(u\"CLEANUP-ERROR in", "= Context(self) self.capture_controller.setup_capture(self.context) def start_capture(self): self.capture_controller.start_capture() def stop_capture(self): self.capture_controller.stop_capture() def", "FileLocation): first_path = first_path.filename base_dir = first_path if base_dir.startswith(\"@\"): #", "are the same as the `configuration file section names`_. 
..", "# pylint: disable=too-many-branches, too-many-statements if self.config.paths: if self.config.verbose: print(\"Supplied path:\",", "\"steps\" steps = self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode(): for step in steps:", "None self.stderr_capture = None self.log_capture = None self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS", "is set to true when the user aborts a test", "pylint: disable=broad-except # -- HANDLE HOOK ERRORS: use_traceback = False", "= \"'{0}' object has no attribute '{1}' at the current", "STEP: Parse all feature files (by using their file location).", "*args) # except KeyboardInterrupt: # self.aborted = True # if", "u\"Traceback (of failed substep):\\n\" message += u\"\".join(traceback.format_tb(step.exc_traceback)) # message +=", "matcher can be overridden in \"environment.py\" hook. steps_dir = os.path.join(self.base_dir,", "\\ exec_file, load_step_modules, PathManager from behave.step_registry import registry as the_step_registry", "try: context._mode = mode yield finally: # -- RESTORE: Initial", "_set_root_attribute(self, attr, value): for frame in self.__dict__[\"_stack\"]: if frame is", "else: print(prefix + repr(frame)) def __getattr__(self, attr): if attr[0] ==", "scenario outline) and holds a :class:`~behave.model.Scenario`. It will not be", "scenario in that scenario. .. _`configuration file section names`: behave.html#configuration-files", "attribute:: text This is set at the step level and", "context layer. except Exception: cleanups_failed = True if self.aborted: print(\"\\nABORTED:", "the last context layer. except Exception: cleanups_failed = True if", "to the tests you're running. You may add arbitrary attributes", "to perform all cleanups. 
assert self._stack, \"REQUIRE: Non-empty stack\" current_layer", "= getattr(self, \"on_cleanup_error\", self.print_cleanup_error) context = self cleanup_errors = []", "disable=protected-access context._root[\"cleanup_errors\"] += 1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func, e) if self.fail_on_cleanup_errors", "Provides a context manager for switching between the two context", "= os.path.splitdrive(path) if drive: # -- WINDOWS: return drive +", "= u\"HOOK-ERROR in %s%s: %s\" % (name, extra, error_text) print(error_message)", "then this will be raised if *behave* overwrites the value.", "scenarios of a scenario outline) and holds a :class:`~behave.model.Scenario`. It", ".. attribute:: stderr_capture If stderr capture is enabled then this", "cleanup functions when stack frame is popped. A user can", "is raised. context._mode = current_mode @contextlib.contextmanager def scoped_context_layer(context, layer_name=None): \"\"\"Provides", "a file so using its directory\") base_dir = os.path.dirname(base_dir) else:", "= [] for cleanup_func in reversed(cleanup_funcs): try: cleanup_func() except Exception", "elif \"all\" in name: # -- ABORT EXECUTION: For before_all/after_all", "\\ \", \".join('\"%s\"' % path for path in self.config.paths)) first_path", "scenario. This attribute will not be present outside of a", "path might be to a feature file if os.path.isfile(base_dir): if", "failure assertion) then the step invoking it will need to", "context: Context object to use. :param mode: Mode to apply", "\"tag\" in name: # -- SCENARIO or FEATURE statement =", "is called. This is intended for user-cleanups. :param cleanup_func: Callable", "\"feature\" in context checks whether there is a \"feature\" value", "elements). 
\"\"\" from __future__ import absolute_import, print_function, with_statement import contextlib", "MODEL ENTITY REFERENCES/SUPPORT: self.feature = None # DISABLED: self.rule =", "u\"\\n\"+ error_message else: # -- FIRST EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message =", "def feature_locations(self): return collect_feature_locations(self.config.paths) def run(self): with self.path_manager: self.setup_paths() return", "current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def use_context_with_mode(context, mode): \"\"\"Switch context to ContextMode.BEHAVE or", "current level\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __contains__(self,", "current_mode # Even if an AssertionError/Exception is raised. context._mode =", "exception): cleanup_func_name = getattr(cleanup_func, \"__name__\", None) if not cleanup_func_name: cleanup_func_name", "self.context, \"REQUIRE: context, but context=%r\" % self.context self.context._set_root_attribute(\"aborted\", bool(value)) aborted", "hook. steps_dir = os.path.join(self.base_dir, self.config.steps_dir) step_paths = [steps_dir] + list(extra_step_paths)", "self.run_with_paths() def run_with_paths(self): self.context = Context(self) self.load_hooks() self.load_step_definitions() # --", "PathManager from behave.step_registry import registry as the_step_registry from enum import", "make_formatters from behave.runner_util import \\ collect_feature_locations, parse_features, \\ exec_file, load_step_modules,", "\"testrun\". :param layer_name: Layer name to use (or None). \"\"\"", "import six from behave._types import ExceptionUtil from behave.capture import CaptureController", "steps_text): \"\"\"The steps identified in the \"steps\" text string will", "feature, scenario, step statement.hook_failed = True if statement.error_message: # --", "the root namespace when the user aborts a test run", "present outside of the scope of a scenario. .. 
attribute::", "XXX-MAYBE: or context.failed) return failed def run(self): \"\"\" Implements the", "called outside of feature\") # -- PREPARE: Save original context", "if new_base_dir == root_dir: if self.config.verbose: if not self.config.paths: print('ERROR:", "def before_all(context): context.on_cleanup_error = handle_cleanup_error context.add_cleanup(cleanup_database, the_database) \"\"\" # --", "the same as the `configuration file section names`_. .. attribute::", "the step level and holds any multiline text associated with", "False, \"failed\": False, \"config\": self._config, \"active_outline\": None, \"cleanup_errors\": 0, \"@cleanups\":", "None # DISABLED: self.scenario = None self.text = None self.table", "= self._stack[0] frame[attr] = value if attr not in self._origin:", "namespace when the user aborts a test run (:exc:`KeyboardInterrupt` exception).", "raised. context._mode = current_mode @contextlib.contextmanager def scoped_context_layer(context, layer_name=None): \"\"\"Provides context", "certain value has been set on the context, for example:", "root_dir = path_getrootdir(base_dir) new_base_dir = base_dir steps_dir = self.config.steps_dir environment_file", "\"\"\" try: self._do_cleanups() finally: # -- ENSURE: Layer is removed", "raise ConfigError(message) base_dir = new_base_dir self.config.base_dir = base_dir for dirpath,", "0 undefined_steps_initial_size = len(self.undefined_steps) for feature in features: if run_feature:", "cleanup_func(*args, **kwargs) else: internal_cleanup_func = cleanup_func current_frame = self._stack[0] if", "self._origin[attr] is not ContextMode.BEHAVE: msg = \"behave runner is masking", "feature (i.e. 
within the scope of the environment before_all and", "path for path in self.config.paths)) first_path = self.config.paths[0] if hasattr(first_path,", "} self._emit_warning(attr, params) stack_limit = 2 if six.PY2: stack_limit +=", "find your features.') else: print('ERROR: Could not find any \"<name>.feature\"", "context.use_with_user_mode(): self.hooks[name](context, *args) # except KeyboardInterrupt: # self.aborted = True", "your specified path \"%s\"' % base_dir) raise ConfigError('No feature files", "is set when we start testing a new scenario (including", "self._do_cleanups() finally: # -- ENSURE: Layer is removed even if", "in self._stack: if attr in frame: return True return False", "if invoked without a feature context. \"\"\" assert isinstance(steps_text, six.text_type),", "\"\"\"Adds a cleanup function that is called when :meth:`Context._pop()` is", "frame[attr] del self._record[attr] else: msg = \"'{0}' object has no", ".. sourcecode:: python context = Context() with use_context_with_mode(context, ContextMode.BEHAVE): ...", "self cleanup_errors = [] for cleanup_func in reversed(cleanup_funcs): try: cleanup_func()", "using its directory\") base_dir = os.path.dirname(base_dir) else: if self.config.verbose: print('Using", "pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS = True def __init__(self, runner): self._runner =", "six.PY2: stack_limit += 1 # Due to traceback2 usage. stack_frame", "self._record[attr] = stack_frame frame = self._stack[0] frame[attr] = value if", "the step. .. attribute:: config The configuration of *behave* as", "Initially: False. .. attribute:: failed This is set to true", "self.__dict__[\"_root\"]: continue if attr in frame: record = self.__dict__[\"_record\"][attr] params", "or ContextMode.USER mode. 
Provides a context manager for switching between", "the_fixture = use_fixture(foo, context, name=\"foo_42\") \"\"\" # pylint: disable=protected-access try:", "POSIX-PATH EXAMPLE: rootdir = path_getrootdir(\"/foo/bar/one.feature\") assert rootdir == \"/\" WINDOWS-PATH", "is called when :meth:`Context._pop()` is called. This is intended for", "if not self.config.paths: print('ERROR: Could not find any \"<name>.feature\" files.", "UTF-8 -*- \"\"\" This module provides Runner class to run", "parse_features, \\ exec_file, load_step_modules, PathManager from behave.step_registry import registry as", "This module provides Runner class to run behave feature files", "in current_frame[\"@cleanups\"]: # -- AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def use_context_with_mode(context,", "finally: # -- ENSURE: Layer is removed even if cleanup-errors", "= [steps_dir] + list(extra_step_paths) load_step_modules(step_paths) def feature_locations(self): return collect_feature_locations(self.config.paths) def", "rootdir == r\"D:\\\" \"\"\" drive, _ = os.path.splitdrive(path) if drive:", "Standard test runner for behave: * setup paths * loads", "step fails. Initially: False. .. attribute:: table This is set", "(or None). \"\"\" initial_data = {\"@cleanups\": []} if layer_name: initial_data[\"@layer\"]", "Report run/not-run feature to reporters. # REQUIRED-FOR: Summary to keep", "holds any :class:`~behave.model.Table` associated with the step. .. attribute:: text", "= traceback.extract_stack(limit=stack_limit)[0] self._record[attr] = stack_frame frame = self._stack[0] frame[attr] =", "base_dir[1:] file_locations = self.feature_locations() if file_locations: base_dir = os.path.dirname(file_locations[0].filename) base_dir", "contains the captured output as a StringIO instance. It is", "you need. 
During the running of your tests the object", "of :class:`~behave.model.Tag` which are basically just glorified strings) combined from", "Could not find \"%s\" directory. '\\ 'Please specify where to", "data for current step. self.table = original_table self.text = original_text", "[] self.hooks = {} self.formatters = [] self.undefined_steps = []", "of a feature scope. .. attribute:: aborted This is set", "record[0], \"line\": record[1], \"function\": record[3], } self._emit_warning(attr, params) stack_limit =", "run method by running the model. \"\"\" self.context = Context(self)", ".table/.text original_table = getattr(self, \"table\", None) original_text = getattr(self, \"text\",", "self.config.verbose: use_traceback = True ExceptionUtil.set_traceback(e) extra = u\"\" if \"tag\"", "in a platform independent way. POSIX-PATH EXAMPLE: rootdir = path_getrootdir(\"/foo/bar/one.feature\")", "set for each scenario in a scenario outline and references", "attribute:: scenario This is set when we start testing a", "= msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __setattr__(self, attr, value): if", "aborted by the user.\") def run_hook(self, name, context, *args): if", "self.context) # -- STEP: Parse all feature files (by using", "if statement.error_message: # -- NOTE: One exception/failure is already stored.", "need to catch the resulting exceptions. :param steps_text: Text with", "FAIL-EARLY: After first failure. run_feature = False except KeyboardInterrupt: self.aborted", "language=self.config.lang) self.features.extend(features) # -- STEP: Run all features. stream_openers =", "additional namespaces for features and scenarios. Certain names are used", "to find your features.' % \\ steps_dir) else: print('ERROR: Could", "-- RUNTIME SUPPORT: self.stdout_capture = None self.stderr_capture = None self.log_capture", "with the Gherkin steps to execute (as string). :returns: True,", "can be overridden in \"environment.py\" hook. 
steps_dir = os.path.join(self.base_dir, self.config.steps_dir)", "(by using their file location). feature_locations = [filename for filename", "Summary to keep track of untested features. for reporter in", "context = self.context self.hook_failures = 0 self.setup_capture() self.run_hook(\"before_all\", context) run_feature", "logging is not being captured. .. attribute:: stdout_capture If stdout", "# MAYBE: context._dump(pretty=True, prefix=\"Context: \") # -- MARK: testrun as", "+= 1 run_feature = False # -- ALWAYS: Report run/not-run", "self.capture_controller.teardown_capture() def run_model(self, features=None): # pylint: disable=too-many-branches if not self.context:", "the step level and holds any :class:`~behave.model.Table` associated with the", "= 0 # @property def _get_aborted(self): value = False if", "useful otherwise. .. attribute:: log_capture If logging capture is enabled", "first_cleanup_erro_info = cleanup_errors[0] del cleanup_errors # -- ENSURE: Release other", "original context data for current step. self.table = original_table self.text", "class Runner(ModelRunner): \"\"\" Standard test runner for behave: * setup", "AttributeError(attr) for frame in self._stack: if attr in frame: return", "\"%r\" % cleanup_func print(u\"CLEANUP-ERROR in %s: %s: %s\" % (cleanup_func_name,", "*args): if not self.config.dry_run and (name in self.hooks): try: with", "Text with the Gherkin steps to execute (as string). 
:returns:", "variable was originally set by user code then this will", "the_step_registry from enum import Enum if six.PY2: # -- USE", "ExceptionUtil from behave.capture import CaptureController from behave.exception import ConfigError from", "step level and holds any multiline text associated with the", "for each scenario in a scenario outline and references the", "-- MODEL ENTITY REFERENCES/SUPPORT: self.feature = None # DISABLED: self.rule", "\"user code is masking context attribute \" \\ \"'%(attr)s'; see", "except Exception: cleanups_failed = True if self.aborted: print(\"\\nABORTED: By user.\")", "that is called when :meth:`Context._pop()` is called. This is intended", "but only at the level they are set. You can't", "ContextMode.USER mode. Provides a context manager for switching between the", "# DISABLED: self.scenario = None self.text = None self.table =", "being overwritten in some situations. If the variable was originally", "file if os.path.isfile(base_dir): if self.config.verbose: print(\"Primary path is to a", "behave.step_registry import registry as the_step_registry from enum import Enum if", ":func:`before_all()` hook. Setup the logging subsystem based on the configuration", "run/not-run feature to reporters. # REQUIRED-FOR: Summary to keep track", "directory. '\\ 'Please specify where to find your features.' 
%", "this attribute contains the captured logging as an instance of", "= None # -- RUNTIME SUPPORT: self.stdout_capture = None self.stderr_capture", "using the context in USER mode.\"\"\" return use_context_with_mode(self, ContextMode.USER) def", "self._origin: self._origin[attr] = self._mode def __delattr__(self, attr): frame = self._stack[0]", "import sys import warnings import weakref import six from behave._types", "self.aborted or (self.hook_failures > 0) or (len(self.undefined_steps) > undefined_steps_initial_size) or", "is self.__dict__[\"_root\"]: continue if attr in frame: record = self.__dict__[\"_record\"][attr]", "# NOTE: Default matcher can be overridden in \"environment.py\" hook.", "= { \"aborted\": False, \"failed\": False, \"config\": self._config, \"active_outline\": None,", "behave model (features). Provides the core functionality of a test", "PathManager() self.base_dir = None def setup_paths(self): # pylint: disable=too-many-branches, too-many-statements", "= msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __contains__(self, attr): if attr[0]", "but context=%r\" % self.context self.context._set_root_attribute(\"aborted\", bool(value)) aborted = property(_get_aborted, _set_aborted,", "# pylint: disable=protected-access assert mode in (ContextMode.BEHAVE, ContextMode.USER) current_mode =", "This is not guaranteed to be \"/\" because Windows. root_dir", "= config self.features = features or [] self.hooks = {}", "= self.FAIL_ON_CLEANUP_ERRORS @staticmethod def ignore_cleanup_error(context, cleanup_func, exception): pass @staticmethod def", "is present mostly for debugging, but may be useful otherwise.", "getattr(self, \"table\", None) original_text = getattr(self, \"text\", None) self.feature.parser.variant =", "logging as an instance of :class:`~behave.log_capture.LoggingCapture`. 
It is not present", "new scenario (including the individual scenarios of a scenario outline)", "self.hook_failures = 0 # @property def _get_aborted(self): value = False", "a certain value has been set on the context, for", "something # -- POSTCONDITION: Original context._mode is restored. :param context:", "run behave feature files (or model elements). \"\"\" from __future__", "ContextMaskWarning(UserWarning): \"\"\"Raised if a context variable is being overwritten in", "values: \"scenario\", \"feature\", \"testrun\". :param layer_name: Layer name to use", "step statement = args[0] if statement: # -- CASE: feature,", "location). feature_locations = [filename for filename in self.feature_locations() if not", "used by *behave*; be wary of using them yourself as", "identified in the \"steps\" text string will be parsed and", "failed This is set to true in the root namespace", "frame[attr] = value if attr not in self._origin: self._origin[attr] =", "use_context_with_mode(context, mode): \"\"\"Switch context to ContextMode.BEHAVE or ContextMode.USER mode. Provides", "new_base_dir == root_dir: if self.config.verbose: if not self.config.paths: print('ERROR: Could", "your '\\ 'specified path \"%s\"' % (steps_dir, base_dir)) message =", "path \"%s\"' % (steps_dir, base_dir)) message = 'No %s directory", "# -- SCENARIO or FEATURE statement = getattr(context, \"scenario\", context.feature)", "= None self.feature = None self.hook_failures = 0 # @property", "then this attribute contains the captured logging as an instance", "six from behave._types import ExceptionUtil from behave.capture import CaptureController from", "in a feature file. 
If the execute_steps call fails (either", "'\\ 'specified path \"%s\"' % (steps_dir, base_dir)) message = 'No", "self._stack if args or kwargs: def internal_cleanup_func(): cleanup_func(*args, **kwargs) else:", "(either through error or failure assertion) then the step invoking", "> 0) or self.aborted or (self.hook_failures > 0) or (len(self.undefined_steps)", ".. attribute:: feature This is set when we start testing", "holds a :class:`~behave.model.Feature`. It will not be present outside of", "if hasattr(first_path, \"filename\"): # -- BETTER: isinstance(first_path, FileLocation): first_path =", "= u\"%s %s\" % (step.keyword, step.name) message = \"%s SUB-STEP:", "definition that called this method uses .table/.text original_table = getattr(self,", "'No %s directory in %r' % (steps_dir, base_dir) raise ConfigError(message)", ":returns: True, if the steps executed successfully. :raises: AssertionError, if", "occur. self._stack.pop(0) def _use_with_behave_mode(self): \"\"\"Provides a context manager for using", ":raises: ValueError, if invoked without a feature context. \"\"\" assert", "\"line\": record[1], \"function\": record[3], } self._emit_warning(attr, params) self.__dict__[\"_root\"][attr] = value", "ConfigError('No feature files in %r' % base_dir) self.base_dir = base_dir", "not self.config.paths: self.config.paths = [base_dir] if base_dir != os.getcwd(): self.path_manager.add(os.getcwd())", "# pylint: disable=protected-access, broad-except cleanups_failed = False self.run_hook(\"after_all\", self.context) try:", "keep track of untested features. for reporter in self.config.reporters: reporter.feature(feature)", "some situations. 
If the variable was originally set by user", "os.path.isfile(os.path.join(new_base_dir, environment_file)): break if new_base_dir == root_dir: break new_base_dir =", "= None self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS @staticmethod def ignore_cleanup_error(context, cleanup_func, exception):", "or self.config.environment_file hooks_path = os.path.join(self.base_dir, filename) if os.path.exists(hooks_path): exec_file(hooks_path, self.hooks)", "if self.config.verbose: print('Using default path \"./features\"') base_dir = os.path.abspath(\"features\") #", "if os.path.isdir(os.path.join(new_base_dir, steps_dir)): break if os.path.isfile(os.path.join(new_base_dir, environment_file)): break if new_base_dir", "you set. These names are: .. attribute:: feature This is", "sys import warnings import weakref import six from behave._types import", "place to store information related to the tests you're running.", "False if self.context: value = self.context.aborted return value # @aborted.setter", "(push/do-something/pop cycle). .. code-block:: with scoped_context_layer(context): the_fixture = use_fixture(foo, context,", ".. 
attribute:: log_capture If logging capture is enabled then this", "2 if six.PY2: stack_limit += 1 # Due to traceback2", "REQUIRED-BY: before_all() hook \"@layer\": \"testrun\", } self._stack = [d] self._record", "that test run is aborted by the user.\") def run_hook(self,", "if base_dir.startswith(\"@\"): # -- USE: behave @features.txt base_dir = base_dir[1:]", "import traceback2 as traceback else: import traceback class CleanupError(RuntimeError): pass", "of a test runner and the functional API needed by", ":class:`behave.runner.ContextMaskWarning` warning will be raised if user code attempts to", "in self._stack[1:]: if attr in frame: record = self._record[attr] params", "\"\\nSubstep info: %s\\n\" % step.error_message message += u\"Traceback (of failed", "scoped_context_layer(context): the_fixture = use_fixture(foo, context, name=\"foo_42\") \"\"\" # pylint: disable=protected-access", "os.path.dirname(base_dir) else: if self.config.verbose: print('Using default path \"./features\"') base_dir =", "and additional namespaces for features and scenarios. Certain names are", "\"scenario\", \"feature\", \"testrun\". :param layer_name: Layer name to use (or", "root namespace when the user aborts a test run (:exc:`KeyboardInterrupt`", "Initial current_mode # Even if an AssertionError/Exception is raised. context._mode", "True failed_count += 1 run_feature = False # -- ALWAYS:", "\"\"\"Switch context to ContextMode.BEHAVE or ContextMode.USER mode. Provides a context", "cases (hooks, ...) # self.setup_capture() # self.run_hook(\"before_all\", self.context) # --", "import \\ collect_feature_locations, parse_features, \\ exec_file, load_step_modules, PathManager from behave.step_registry", "_get_aborted(self): value = False if self.context: value = self.context.aborted return", "in \"environment.py\" hook. steps_dir = os.path.join(self.base_dir, self.config.steps_dir) step_paths = [steps_dir]", "cleanup-errors occur. 
self._stack.pop(0) def _use_with_behave_mode(self): \"\"\"Provides a context manager for", "self.print_cleanup_error) context = self cleanup_errors = [] for cleanup_func in", "or kwargs: def internal_cleanup_func(): cleanup_func(*args, **kwargs) else: internal_cleanup_func = cleanup_func", "self.hooks[name](context, *args) # except KeyboardInterrupt: # self.aborted = True #", "= os.path.dirname(file_locations[0].filename) base_dir = os.path.abspath(base_dir) # supplied path might be", "((failed_count > 0) or self.aborted or (self.hook_failures > 0) or", "file_locations = self.feature_locations() if file_locations: base_dir = os.path.dirname(file_locations[0].filename) base_dir =", "from behave.runner_util import \\ collect_feature_locations, parse_features, \\ exec_file, load_step_modules, PathManager", "self.feature_locations() if not self.config.exclude(filename)] features = parse_features(feature_locations, language=self.config.lang) self.features.extend(features) #", "ContextMode.BEHAVE): ... # Do something # -- POSTCONDITION: Original context._mode", "scenario. .. _`configuration file section names`: behave.html#configuration-files \"\"\" # pylint:", "present outside of a feature scope. .. attribute:: aborted This", "new layer on the context stack. HINT: Use layer_name values:", "e: # pylint: disable=broad-except # pylint: disable=protected-access context._root[\"cleanup_errors\"] += 1", "true in the root namespace when the user aborts a", "(of failed substep):\\n\" message += u\"\".join(traceback.format_tb(step.exc_traceback)) # message += u\"\\nTraceback", "scenario in a scenario outline and references the :class:`~behave.model.Row` that", "cleanup function that is called when :meth:`Context._pop()` is called. This", "with_statement import contextlib import os.path import sys import warnings import", "name, value)) else: print(prefix + repr(frame)) def __getattr__(self, attr): if", "running. 
You may add arbitrary attributes to it of whatever", "will need to catch the resulting exceptions. :param steps_text: Text", "rootdir from path in a platform independent way. POSIX-PATH EXAMPLE:", "in weird cases (hooks, ...) context = self.context self.hook_failures =", "= True if self.aborted: print(\"\\nABORTED: By user.\") for formatter in", "attr) raise AttributeError(msg) def __setattr__(self, attr, value): if attr[0] ==", "\"\"\" # pylint: disable=protected-access assert mode in (ContextMode.BEHAVE, ContextMode.USER) current_mode", "your tests the object will have additional layers of namespace", "pylint: disable=protected-access assert mode in (ContextMode.BEHAVE, ContextMode.USER) current_mode = context._mode", "\"after_all\"): # raise except Exception as e: # pylint: disable=broad-except", "* loads environment hooks * loads step definitions * select", "\"\" if self._mode is ContextMode.BEHAVE and self._origin[attr] is not ContextMode.BEHAVE:", "= self._mode def __delattr__(self, attr): frame = self._stack[0] if attr", "None). \"\"\" initial_data = {\"@cleanups\": []} if layer_name: initial_data[\"@layer\"] =", "context._push(layer_name) yield context finally: context._pop() def path_getrootdir(path): \"\"\" Extract rootdir", "the two usage modes while using the context: * BEHAVE:", "(i.e. 
within the scope of the environment before_all and after_all).", "record[1], \"function\": record[3], } self._emit_warning(attr, params) self.__dict__[\"_root\"][attr] = value if", "filename=None): filename = filename or self.config.environment_file hooks_path = os.path.join(self.base_dir, filename)", "use_context_with_mode(self, ContextMode.USER) def user_mode(self): warnings.warn(\"Use 'use_with_user_mode()' instead\", PendingDeprecationWarning, stacklevel=2) return", "statement.store_exception_context(e) statement.error_message = error_message def setup_capture(self): if not self.context: self.context", "\"behave\" (internal) mode * USER: Indicates \"user\" mode (in steps,", "pretty=False, prefix=\" \"): for level, frame in enumerate(self._stack): print(\"%sLevel %d\"", "file location). feature_locations = [filename for filename in self.feature_locations() if", "to traceback2 usage. stack_frame = traceback.extract_stack(limit=stack_limit)[0] self._record[attr] = stack_frame frame", "isinstance(steps_text, six.text_type), \"Steps must be unicode.\" if not self.feature: raise", "cases (hooks, ...) context = self.context self.hook_failures = 0 self.setup_capture()", "variable was originally set by *behave* then this will be", "Tries to perform all cleanups. assert self._stack, \"REQUIRE: Non-empty stack\"", "assert False, message # -- FINALLY: Restore original context data", "the feature and scenario. This attribute will not be present", "traceback2 usage. 
stack_frame = traceback.extract_stack(limit=stack_limit)[0] self._record[attr] = stack_frame frame =", "= base_dir steps_dir = self.config.steps_dir environment_file = self.config.environment_file while True:", "for name in sorted(frame.keys()): value = frame[name] print(\"%s %-15s =", "0, \"@cleanups\": [], # -- REQUIRED-BY: before_all() hook \"@layer\": \"testrun\",", "self.FAIL_ON_CLEANUP_ERRORS @staticmethod def ignore_cleanup_error(context, cleanup_func, exception): pass @staticmethod def print_cleanup_error(context,", "if \"before_all\" not in self.hooks: self.hooks[\"before_all\"] = self.before_all_default_hook def load_step_definitions(self,", "There is a \"root\" namespace and additional namespaces for features", "ENSURE: context.execute_steps() works in weird cases (hooks, ...) # self.setup_capture()", "is masking context attribute '%(attr)s' \" \\ \"originally set by", "or (self.hook_failures > 0) or (len(self.undefined_steps) > undefined_steps_initial_size) or cleanups_failed)", "is None: extra_step_paths = [] # -- Allow steps to", "(optional). :param kwargs: Kwargs for cleanup_func() call (optional). \"\"\" #", "features: if run_feature: try: self.feature = feature for formatter in", "Save original context data for current step. # Needed if", "print(prefix + repr(frame)) def __getattr__(self, attr): if attr[0] == \"_\":", "\" \\ \"originally set by behave\" elif self._config.verbose: msg =", "level and holds any multiline text associated with the step.", "a feature file. 
If the execute_steps call fails (either through", "specified path \"%s\"' % base_dir) raise ConfigError('No feature files in", "self.context = Context(self) self.capture_controller.setup_capture(self.context) def start_capture(self): self.capture_controller.start_capture() def stop_capture(self): self.capture_controller.stop_capture()", "% step.error_message message += u\"Traceback (of failed substep):\\n\" message +=", "None else: # -- CASE: feature, scenario, step statement =", "substep):\\n\" message += u\"\".join(traceback.format_tb(step.exc_traceback)) # message += u\"\\nTraceback (of context.execute_steps()):\"", "of these variables, or if *behave* itself tries to overwrite", "each scenario in a scenario outline and references the :class:`~behave.model.Row`", "outline) and holds a :class:`~behave.model.Scenario`. It will not be present", "without a feature context. \"\"\" assert isinstance(steps_text, six.text_type), \"Steps must", "base_dir self.path_manager.add(base_dir) if not self.config.paths: self.config.paths = [base_dir] if base_dir", "context in BEHAVE mode.\"\"\" return use_context_with_mode(self, ContextMode.BEHAVE) def use_with_user_mode(self): \"\"\"Provides", "present if logging is not being captured. .. attribute:: stdout_capture", "= None # DISABLED: self.rule = None # DISABLED: self.scenario", "attributes to it of whatever value you need. During the", "# self.run_hook(\"before_all\", self.context) # -- STEP: Parse all feature files", "= first_path.filename base_dir = first_path if base_dir.startswith(\"@\"): # -- USE:", "SUPPORT: self.stdout_capture = None self.stderr_capture = None self.log_capture = None", "attr, \"filename\": record[0], \"line\": record[1], \"function\": record[3], } self._emit_warning(attr, params)", "as a step fails. Initially: False. .. attribute:: table This", "attribute:: failed This is set to true in the root", "perform all cleanups. assert self._stack, \"REQUIRE: Non-empty stack\" current_layer =", "value you need. 
During the running of your tests the", "failure occurs. :raises: ValueError, if invoked without a feature context.", "the tests you're running. You may add arbitrary attributes to", "\"\"\"Execute optional cleanup functions when stack frame is popped. A", "= len(self.undefined_steps) for feature in features: if run_feature: try: self.feature", "set by behave\" elif self._config.verbose: msg = \"user code is", "None self.hook_failures = 0 # @property def _get_aborted(self): value =", "a step fails. Initially: False. .. attribute:: table This is", "= context._mode try: context._mode = mode yield finally: # --", "import contextlib import os.path import sys import warnings import weakref", "step level and holds any :class:`~behave.model.Table` associated with the step.", "environment before_all and after_all). .. attribute:: scenario This is set", "\"line\": record[1], \"function\": record[3], } self._emit_warning(attr, params) stack_limit = 2", "to diagnose problem. step_line = u\"%s %s\" % (step.keyword, step.name)", "os.path.splitdrive(path) if drive: # -- WINDOWS: return drive + os.path.sep", "self.path_manager = PathManager() self.base_dir = None def setup_paths(self): # pylint:", "bool(value)) aborted = property(_get_aborted, _set_aborted, doc=\"Indicates that test run is", "self.config.base_dir = base_dir for dirpath, dirnames, filenames in os.walk(base_dir, followlinks=True):", "ContextMode.BEHAVE # -- MODEL ENTITY REFERENCES/SUPPORT: self.feature = None #", "the value you set. These names are: .. attribute:: feature", "if attr in frame: record = self._record[attr] params = {", "pylint: disable=protected-access assert self.context, \"REQUIRE: context, but context=%r\" % self.context", "aborts a test run (:exc:`KeyboardInterrupt` exception). Initially: False. .. 
attribute::", "\"\"\" # -- BEST-EFFORT ALGORITHM: Tries to perform all cleanups.", "Context(self) if self.step_registry is None: self.step_registry = the_step_registry if features", "have additional layers of namespace added and removed automatically. There", "= [d] self._record = {} self._origin = {} self._mode =", "features = parse_features(feature_locations, language=self.config.lang) self.features.extend(features) # -- STEP: Run all", "= self.config.paths[0] if hasattr(first_path, \"filename\"): # -- BETTER: isinstance(first_path, FileLocation):", "behave.runner_util import \\ collect_feature_locations, parse_features, \\ exec_file, load_step_modules, PathManager from", "msg = \"'{0}' object has no attribute '{1}'\" msg =", "It will not be present outside of the scope of", "the context stack. Performs any pending cleanups, registered for this", "add_cleanup(self, cleanup_func, *args, **kwargs): \"\"\"Adds a cleanup function that is", "%s\" % \\ (step.status.name.upper(), step_line) if step.error_message: message += \"\\nSubstep", "six.text_type), \"Steps must be unicode.\" if not self.feature: raise ValueError(\"execute_steps()", "failed_count += 1 run_feature = False # -- ALWAYS: Report", "base_dir = base_dir[1:] file_locations = self.feature_locations() if file_locations: base_dir =", "the current layer from the context stack. Performs any pending", "cleanup_func in reversed(cleanup_funcs): try: cleanup_func() except Exception as e: #", "By user.\") for formatter in self.formatters: formatter.close() for reporter in", "\"active_outline\": None, \"cleanup_errors\": 0, \"@cleanups\": [], # -- REQUIRED-BY: before_all()", "message += u\"Traceback (of failed substep):\\n\" message += u\"\".join(traceback.format_tb(step.exc_traceback)) #", "return self.__dict__[attr] except KeyError: raise AttributeError(attr) for frame in self._stack:", "self.config.stop or self.aborted: # -- FAIL-EARLY: After first failure. 
run_feature", "test whether a certain value has been set on the", "record[3], } self._emit_warning(attr, params) stack_limit = 2 if six.PY2: stack_limit", "NOTE: One exception/failure is already stored. # Append only error", "path_getrootdir(\"/foo/bar/one.feature\") assert rootdir == \"/\" WINDOWS-PATH EXAMPLE: rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\")", "deleted from the context using \"del\" but only at the", "six.reraise(*first_cleanup_erro_info) def _push(self, layer_name=None): \"\"\"Push a new layer on the", "self.before_all_default_hook def load_step_definitions(self, extra_step_paths=None): if extra_step_paths is None: extra_step_paths =", "defined in a feature file. If the execute_steps call fails", "in a scenario outline and references the :class:`~behave.model.Row` that is", "except Exception as e: # pylint: disable=broad-except # -- HANDLE", "def start_capture(self): self.capture_controller.start_capture() def stop_capture(self): self.capture_controller.stop_capture() def teardown_capture(self): self.capture_controller.teardown_capture() def", "add a user-specified handler for cleanup errors. .. code-block:: python", "* loads step definitions * select feature files, parses them", "behave feature files (or model elements). \"\"\" from __future__ import", "\"all\" in name: # -- ABORT EXECUTION: For before_all/after_all self.aborted", "os.path.isfile(base_dir): if self.config.verbose: print(\"Primary path is to a file so", "for context layer (push/do-something/pop cycle). .. code-block:: with scoped_context_layer(context): the_fixture", "if args or kwargs: def internal_cleanup_func(): cleanup_func(*args, **kwargs) else: internal_cleanup_func", "elif self._mode is ContextMode.USER: if self._origin[attr] is not ContextMode.USER: msg", "of whatever value you need. During the running of your", "if failed: failed_count += 1 if self.config.stop or self.aborted: #", "(:exc:`KeyboardInterrupt` exception). Initially: False. .. 
attribute:: failed This is set", "\"steps\" text string will be parsed and executed in turn", "layer. except Exception: cleanups_failed = True if self.aborted: print(\"\\nABORTED: By", "add arbitrary attributes to it of whatever value you need.", "for step in steps: passed = step.run(self._runner, quiet=True, capture=False) if", "not self.context: self.context = Context(self) self.capture_controller.setup_capture(self.context) def start_capture(self): self.capture_controller.start_capture() def", "just glorified strings) combined from the feature and scenario. This", "as soon as a step fails. Initially: False. .. attribute::", "Parse all feature files (by using their file location). feature_locations", "attribute:: config The configuration of *behave* as determined by configuration", "present outside of a feature (i.e. within the scope of", "= \"steps\" steps = self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode(): for step in", "frame: del frame[attr] del self._record[attr] else: msg = \"'{0}' object", "aborted This is set to true in the root namespace", "%(function)s (%(filename)s:%(line)s)\" elif self._mode is ContextMode.USER: if self._origin[attr] is not", "break else: if self.config.verbose: if not self.config.paths: print('ERROR: Could not", "select feature files, parses them and creates model (elements) \"\"\"", "exception): pass @staticmethod def print_cleanup_error(context, cleanup_func, exception): cleanup_func_name = getattr(cleanup_func,", "= self.__dict__[\"_record\"][attr] params = { \"attr\": attr, \"filename\": record[0], \"line\":", "self.fail_on_cleanup_errors and cleanup_errors: first_cleanup_erro_info = cleanup_errors[0] del cleanup_errors # --", "callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\" assert self._stack if args or kwargs: def", "\"REQUIRE: context, but context=%r\" % self.context self.context._set_root_attribute(\"aborted\", bool(value)) aborted =", "= feature for formatter in self.formatters: 
formatter.uri(feature.filename) failed = feature.run(self)", "if attr[0] == \"_\": return attr in self.__dict__ for frame", "Exception as e: # pylint: disable=broad-except # pylint: disable=protected-access context._root[\"cleanup_errors\"]", "EXECUTION: For before_all/after_all self.aborted = True statement = None else:", "else: if self.config.verbose: print('Using default path \"./features\"') base_dir = os.path.abspath(\"features\")", "if self.config.paths: if self.config.verbose: print(\"Supplied path:\", \\ \", \".join('\"%s\"' %", "a StringIO instance. It is not present if stdout is", "cleanups_failed = True if self.aborted: print(\"\\nABORTED: By user.\") for formatter", "attribute contains the captured output as a StringIO instance. It", "can add a user-specified handler for cleanup errors. .. code-block::", "mode yield finally: # -- RESTORE: Initial current_mode # Even", "print('ERROR: Could not find any \"<name>.feature\" files. '\\ 'Please specify", "path is to a file so using its directory\") base_dir", "be \"/\" because Windows. root_dir = path_getrootdir(base_dir) new_base_dir = base_dir", "scenario outline and references the :class:`~behave.model.Row` that is active for", "None self.text = None self.table = None # -- RUNTIME", "[] for cleanup_func in reversed(cleanup_funcs): try: cleanup_func() except Exception as", "None) original_text = getattr(self, \"text\", None) self.feature.parser.variant = \"steps\" steps", "traceback class CleanupError(RuntimeError): pass class ContextMaskWarning(UserWarning): \"\"\"Raised if a context", "capture is enabled then this attribute contains the captured logging", "exception). Initially: False. Stored as derived attribute in :attr:`Context.aborted`. \"\"\"", "feature This is set when we start testing a new", "captured logging as an instance of :class:`~behave.log_capture.LoggingCapture`. 
It is not", "the execute_steps call fails (either through error or failure assertion)", "scenario level but you can delete a value set for", "os.path import sys import warnings import weakref import six from", "feature context. \"\"\" assert isinstance(steps_text, six.text_type), \"Steps must be unicode.\"", "in %s%s: %s\" % (name, extra, error_text) print(error_message) self.hook_failures +=", "overwrites the value. \"\"\" pass class ContextMode(Enum): \"\"\"Used to distinguish", "if stdout is not being captured. .. attribute:: stderr_capture If", "self._emit_warning(attr, params) stack_limit = 2 if six.PY2: stack_limit += 1", ":meth:`Context._pop()` is called. This is intended for user-cleanups. :param cleanup_func:", "as the_step_registry from enum import Enum if six.PY2: # --", "testing a new feature and holds a :class:`~behave.model.Feature`. It will", "quiet=True, capture=False) if not passed: # -- ISSUE #96: Provide", "ENSURE: context.execute_steps() works in weird cases (hooks, ...) context =", "= path_getrootdir(base_dir) new_base_dir = base_dir steps_dir = self.config.steps_dir environment_file =", "-- FIRST EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message = error_message def setup_capture(self): if", "dropping the last context layer. except Exception: cleanups_failed = True", "__contains__(self, attr): if attr[0] == \"_\": return attr in self.__dict__", "self).__init__(config) self.path_manager = PathManager() self.base_dir = None def setup_paths(self): #", "six.PY2: # -- USE PYTHON3 BACKPORT: With unicode traceback support.", "= [filename for filename in self.feature_locations() if not self.config.exclude(filename)] features", "statement.error_message = error_message def setup_capture(self): if not self.context: self.context =", "for frame in self.__dict__[\"_stack\"]: if frame is self.__dict__[\"_root\"]: continue if", "This is intended for user-cleanups. 
:param cleanup_func: Callable function :param", "this will be raised if *behave* overwrites the value. If", "# pylint: disable=broad-except # pylint: disable=protected-access context._root[\"cleanup_errors\"] += 1 cleanup_errors.append(sys.exc_info())", "mode * USER: Indicates \"user\" mode (in steps, hooks, fixtures,", "in weird cases (hooks, ...) # self.setup_capture() # self.run_hook(\"before_all\", self.context)", "call fails (either through error or failure assertion) then the", "# Do something # -- POSTCONDITION: Original context._mode is restored.", "try: self._do_cleanups() finally: # -- ENSURE: Layer is removed even", "for debugging, but may be useful otherwise. .. attribute:: log_capture", "failed def run(self): \"\"\" Implements the run method by running", "attribute \" \\ \"'%(attr)s'; see the tutorial for what this", "text associated with the step. .. attribute:: config The configuration", "environment_file = self.config.environment_file while True: if self.config.verbose: print(\"Trying base directory:\",", "logging subsystem based on the configuration data. \"\"\" # pylint:", "True) def _do_cleanups(self): \"\"\"Execute optional cleanup functions when stack frame", "= args[0] if statement: # -- CASE: feature, scenario, step", "is intended for user-cleanups. :param cleanup_func: Callable function :param args:", "value has been set on the context, for example: \"feature\"", "no attribute '{1}' at the current level\" msg = msg.format(self.__class__.__name__,", "a platform independent way. POSIX-PATH EXAMPLE: rootdir = path_getrootdir(\"/foo/bar/one.feature\") assert", "delete a value set by a feature at a scenario", "(hooks, ...) context = self.context self.hook_failures = 0 self.setup_capture() self.run_hook(\"before_all\",", "to overwrite a user-set variable. You may use the \"in\"", "to catch the resulting exceptions. 
:param steps_text: Text with the", "self.load_step_definitions() # -- ENSURE: context.execute_steps() works in weird cases (hooks,", "os.path.join(self.base_dir, filename) if os.path.exists(hooks_path): exec_file(hooks_path, self.hooks) if \"before_all\" not in", "self.setup_capture() # self.run_hook(\"before_all\", self.context) # -- STEP: Parse all feature", "Exception as e: # pylint: disable=broad-except # -- HANDLE HOOK", "layer_name values: \"scenario\", \"feature\", \"testrun\". :param layer_name: Layer name to", "= layer_name self._stack.insert(0, initial_data) def _pop(self): \"\"\"Pop the current layer", "or self.aborted: # -- FAIL-EARLY: After first failure. run_feature =", "base_dir = new_base_dir self.config.base_dir = base_dir for dirpath, dirnames, filenames", "%r\" % (prefix, name, value)) else: print(prefix + repr(frame)) def", "new_base_dir == root_dir: break new_base_dir = os.path.dirname(new_base_dir) if new_base_dir ==", "is aborted by the user.\") def run_hook(self, name, context, *args):", "# -- AFTER-ALL: # pylint: disable=protected-access, broad-except cleanups_failed = False", "name=\"foo_42\") \"\"\" # pylint: disable=protected-access try: context._push(layer_name) yield context finally:", "when we start testing a new scenario (including the individual", "name: extra = \"(tag=%s)\" % args[0] error_text = ExceptionUtil.describe(e, use_traceback).rstrip()", "runner for behave: * setup paths * loads environment hooks", "registry as the_step_registry from enum import Enum if six.PY2: #", "= os.path.dirname(base_dir) else: if self.config.verbose: print('Using default path \"./features\"') base_dir", "to a feature file if os.path.isfile(base_dir): if self.config.verbose: print(\"Primary path", "instances of :class:`~behave.model.Tag` which are basically just glorified strings) combined", "import ConfigError from behave.formatter._registry import make_formatters from behave.runner_util import \\", "# Without dropping the last context layer. 
except Exception: cleanups_failed", "self._config = runner.config d = self._root = { \"aborted\": False,", "for formatter in self.formatters: formatter.uri(feature.filename) failed = feature.run(self) if failed:", "being captured. .. attribute:: stdout_capture If stdout capture is enabled", "load_step_definitions(self, extra_step_paths=None): if extra_step_paths is None: extra_step_paths = [] #", "%s\\n\" % step.error_message message += u\"Traceback (of failed substep):\\n\" message", "attr, value): for frame in self.__dict__[\"_stack\"]: if frame is self.__dict__[\"_root\"]:", "active tags (as a Python set containing instances of :class:`~behave.model.Tag`", "attr not in self._origin: self._origin[attr] = self._mode def _emit_warning(self, attr,", "new feature and holds a :class:`~behave.model.Feature`. It will not be", "in frame: del frame[attr] del self._record[attr] else: msg = \"'{0}'", "-- BETTER: isinstance(first_path, FileLocation): first_path = first_path.filename base_dir = first_path", "warnings.warn(msg, ContextMaskWarning, stacklevel=3) def _dump(self, pretty=False, prefix=\" \"): for level,", "attr) raise AttributeError(msg) def __contains__(self, attr): if attr[0] == \"_\":", "config, features=None, step_registry=None): self.config = config self.features = features or", "value)) else: print(prefix + repr(frame)) def __getattr__(self, attr): if attr[0]", "= error_message def setup_capture(self): if not self.context: self.context = Context(self)", "outline and references the :class:`~behave.model.Row` that is active for the", "current step. 
self.table = original_table self.text = original_text return True", "self.context = None self.feature = None self.hook_failures = 0 #", "ERRORS: use_traceback = False if self.config.verbose: use_traceback = True ExceptionUtil.set_traceback(e)", "msg = \"user code is masking context attribute '%(attr)s' \"", "\"scenario\", context.feature) elif \"all\" in name: # -- ABORT EXECUTION:", "as an instance of :class:`~behave.log_capture.LoggingCapture`. It is not present if", "frame[attr] msg = \"'{0}' object has no attribute '{1}'\" msg", "not being captured. A :class:`behave.runner.ContextMaskWarning` warning will be raised if", "False. Stored as derived attribute in :attr:`Context.aborted`. \"\"\" # pylint:", "failed = ((failed_count > 0) or self.aborted or (self.hook_failures >", "as *behave* may overwrite the value you set. These names", "= use_fixture(foo, context, name=\"foo_42\") \"\"\" # pylint: disable=protected-access try: context._push(layer_name)", "value = self.context.aborted return value # @aborted.setter def _set_aborted(self, value):", "len(self.undefined_steps) for feature in features: if run_feature: try: self.feature =", "# -- USE: behave @features.txt base_dir = base_dir[1:] file_locations =", "\"/\" WINDOWS-PATH EXAMPLE: rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir == r\"D:\\\"", "to context object. \"\"\" # pylint: disable=protected-access assert mode in", "\"\"\" self.context = Context(self) return self.run_model() class Runner(ModelRunner): \"\"\" Standard", "execute_steps(self, steps_text): \"\"\"The steps identified in the \"steps\" text string", "\" \\ \"originally set in %(function)s (%(filename)s:%(line)s)\" elif self._mode is", "initial_data = {\"@cleanups\": []} if layer_name: initial_data[\"@layer\"] = layer_name self._stack.insert(0,", "context variable is being overwritten in some situations. 
If the", "except KeyError: raise AttributeError(attr) for frame in self._stack: if attr", "ContextMode.USER: if self._origin[attr] is not ContextMode.USER: msg = \"user code", "Provides the core functionality of a test runner and the", "as a StringIO instance. It is not present if stdout", "outside of the scope of a scenario. .. attribute:: tags", "Do something # -- POSTCONDITION: Original context._mode is restored. :param", "file_locations: base_dir = os.path.dirname(file_locations[0].filename) base_dir = os.path.abspath(base_dir) # supplied path", "None) if not cleanup_func_name: cleanup_func_name = \"%r\" % cleanup_func print(u\"CLEANUP-ERROR", "[filename for filename in self.feature_locations() if not self.config.exclude(filename)] features =", "ContextMode.USER: msg = \"user code is masking context attribute '%(attr)s'", "\"originally set by behave\" elif self._config.verbose: msg = \"user code", "load_hooks(self, filename=None): filename = filename or self.config.environment_file hooks_path = os.path.join(self.base_dir,", "instead\", PendingDeprecationWarning, stacklevel=2) return self.use_with_user_mode() def _set_root_attribute(self, attr, value): for", "%s: %s: %s\" % (cleanup_func_name, exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout) # MAYBE:", "\"/\" because Windows. 
root_dir = path_getrootdir(base_dir) new_base_dir = base_dir steps_dir", "-- RESTORE: Initial current_mode # Even if an AssertionError/Exception is", "-- AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def use_context_with_mode(context, mode): \"\"\"Switch context", "It will not be present outside of a feature (i.e.", "dir # NOTE: Default matcher can be overridden in \"environment.py\"", "context: * BEHAVE: Indicates \"behave\" (internal) mode * USER: Indicates", "# -- AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def use_context_with_mode(context, mode): \"\"\"Switch", "message += u\"\\nTraceback (of context.execute_steps()):\" assert False, message # --", "[fn for fn in filenames if fn.endswith(\".feature\")]: break else: if", "def before_all_default_hook(self, context): \"\"\" Default implementation for :func:`before_all()` hook. Setup", "for cleanup_func() call (optional). :param kwargs: Kwargs for cleanup_func() call", "context.feature) elif \"all\" in name: # -- ABORT EXECUTION: For", "mode (in steps, hooks, fixtures, ...) \"\"\" BEHAVE = 1", "self.context = Context(self) self.load_hooks() self.load_step_definitions() # -- ENSURE: context.execute_steps() works", "self.context._do_cleanups() # Without dropping the last context layer. except Exception:", "-- STEP: Run all features. stream_openers = self.config.outputs self.formatters =", "information during the running of tests. This object is a", "user-cleanups. :param cleanup_func: Callable function :param args: Args for cleanup_func()", "if attr[0] == \"_\": self.__dict__[attr] = value return for frame", "steps: passed = step.run(self._runner, quiet=True, capture=False) if not passed: #", "= \"behave runner is masking context attribute '%(attr)s' \" \\", "is restored. :param context: Context object to use. :param mode:", "ModelRunner(object): \"\"\" Test runner for a behave model (features). 
Provides", "in :attr:`Context.aborted`. \"\"\" # pylint: disable=too-many-instance-attributes def __init__(self, config, features=None,", "One exception/failure is already stored. # Append only error message.", "= None # DISABLED: self.scenario = None self.text = None", "'Please specify where to find your features.' % \\ steps_dir)", "other exception frames. six.reraise(*first_cleanup_erro_info) def _push(self, layer_name=None): \"\"\"Push a new", "configuration data. \"\"\" # pylint: disable=no-self-use context.config.setup_logging() def load_hooks(self, filename=None):", "if six.PY2: # -- USE PYTHON3 BACKPORT: With unicode traceback", "fails. Initially: False. .. attribute:: table This is set at", "path_getrootdir(base_dir) new_base_dir = base_dir steps_dir = self.config.steps_dir environment_file = self.config.environment_file", "= os.path.dirname(new_base_dir) if new_base_dir == root_dir: if self.config.verbose: if not", "getattr(context, \"scenario\", context.feature) elif \"all\" in name: # -- ABORT", "@contextlib.contextmanager def scoped_context_layer(context, layer_name=None): \"\"\"Provides context manager for context layer", "{ \"aborted\": False, \"failed\": False, \"config\": self._config, \"active_outline\": None, \"cleanup_errors\":", "self._runner = weakref.proxy(runner) self._config = runner.config d = self._root =", "holds a :class:`~behave.model.Scenario`. It will not be present outside of", "filename in self.feature_locations() if not self.config.exclude(filename)] features = parse_features(feature_locations, language=self.config.lang)", "features and scenarios. Certain names are used by *behave*; be", "RESTORE: Initial current_mode # Even if an AssertionError/Exception is raised.", "fixtures, ...) \"\"\" BEHAVE = 1 USER = 2 class", "object to use. :param mode: Mode to apply to context", "def execute_steps(self, steps_text): \"\"\"The steps identified in the \"steps\" text", "root namespace as soon as a step fails. 
Initially: False.", "This is set to true in the root namespace as", "in reversed(cleanup_funcs): try: cleanup_func() except Exception as e: # pylint:", "context object. \"\"\" # pylint: disable=protected-access assert mode in (ContextMode.BEHAVE,", "code overwrites the value. \"\"\" pass class ContextMode(Enum): \"\"\"Used to", "if frame is self.__dict__[\"_root\"]: continue if attr in frame: record", "is a \"root\" namespace and additional namespaces for features and", "code then this will be raised if *behave* overwrites the", "\"\"\" from __future__ import absolute_import, print_function, with_statement import contextlib import", "the step. .. attribute:: text This is set at the", "set to true in the root namespace as soon as", "\".join('\"%s\"' % path for path in self.config.paths)) first_path = self.config.paths[0]", "return os.path.sep class ModelRunner(object): \"\"\" Test runner for a behave", "context using \"del\" but only at the level they are", "(:exc:`KeyboardInterrupt` exception). Initially: False. Stored as derived attribute in :attr:`Context.aborted`.", "USE PYTHON3 BACKPORT: With unicode traceback support. import traceback2 as", "if *behave* overwrites the value. If the variable was originally", "return use_context_with_mode(self, ContextMode.USER) def user_mode(self): warnings.warn(\"Use 'use_with_user_mode()' instead\", PendingDeprecationWarning, stacklevel=2)", "the root. This is not guaranteed to be \"/\" because", "[] self.undefined_steps = [] self.step_registry = step_registry self.capture_controller = CaptureController(config)", "active for the current scenario. 
It is present mostly for", "if attr[0] == \"_\": try: return self.__dict__[attr] except KeyError: raise", "files, parses them and creates model (elements) \"\"\" def __init__(self,", "\\ \"'%(attr)s'; see the tutorial for what this means\" if", "def __contains__(self, attr): if attr[0] == \"_\": return attr in", "is set for each scenario in a scenario outline and", "be present outside of a feature (i.e. within the scope", "may be deleted from the context using \"del\" but only", "def print_cleanup_error(context, cleanup_func, exception): cleanup_func_name = getattr(cleanup_func, \"__name__\", None) if", "not guaranteed to be \"/\" because Windows. root_dir = path_getrootdir(base_dir)", "Due to traceback2 usage. stack_frame = traceback.extract_stack(limit=stack_limit)[0] self._record[attr] = stack_frame", "a behave model (features). Provides the core functionality of a", "section names`: behave.html#configuration-files \"\"\" # pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS = True", "when :meth:`Context._pop()` is called. This is intended for user-cleanups. :param", "MARK: testrun as FAILED # context._set_root_attribute(\"failed\", True) def _do_cleanups(self): \"\"\"Execute", "steps_dir)): break if os.path.isfile(os.path.join(new_base_dir, environment_file)): break if new_base_dir == root_dir:", "if stderr is not being captured. A :class:`behave.runner.ContextMaskWarning` warning will", "if not self.config.paths: print('ERROR: Could not find \"%s\" directory. '\\", "'Please specify where to find your features.') else: print('ERROR: Could", "first failure. run_feature = False except KeyboardInterrupt: self.aborted = True", "ContextMode.BEHAVE and self._origin[attr] is not ContextMode.BEHAVE: msg = \"behave runner", "run_hook(self, name, context, *args): if not self.config.dry_run and (name in", "in the context. Values may be deleted from the context", "of :class:`~behave.log_capture.LoggingCapture`. 
It is not present if logging is not", "u\"%s %s\" % (step.keyword, step.name) message = \"%s SUB-STEP: %s\"", "context = Context() with use_context_with_mode(context, ContextMode.BEHAVE): ... # Do something", "EXAMPLE: rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir == r\"D:\\\" \"\"\" drive,", "a context manager for using the context in USER mode.\"\"\"", "frame = self._stack[0] frame[attr] = value if attr not in", "False # -- ALWAYS: Report run/not-run feature to reporters. #", "self._origin = {} self._mode = ContextMode.BEHAVE # -- MODEL ENTITY", "= None self.hook_failures = 0 # @property def _get_aborted(self): value", "the environment before_all and after_all). .. attribute:: scenario This is", "*behave* then this will be raised if user code overwrites", "PendingDeprecationWarning, stacklevel=2) return self.use_with_user_mode() def _set_root_attribute(self, attr, value): for frame", "where to find your features.') else: print('ERROR: Could not find", "or (len(self.undefined_steps) > undefined_steps_initial_size) or cleanups_failed) # XXX-MAYBE: or context.failed)", "in name: # -- ABORT EXECUTION: For before_all/after_all self.aborted =", "testing a new scenario (including the individual scenarios of a", ":attr:`Context.aborted`. \"\"\" # pylint: disable=too-many-instance-attributes def __init__(self, config, features=None, step_registry=None):", "self._mode is ContextMode.USER: if self._origin[attr] is not ContextMode.USER: msg =", "if pretty: for name in sorted(frame.keys()): value = frame[name] print(\"%s", "not in (\"before_all\", \"after_all\"): # raise except Exception as e:", "a scenario level but you can delete a value set", "at the current level\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg)", "whether a certain value has been set on the context,", "value set for a scenario in that scenario. .. 
_`configuration", "extra_step_paths is None: extra_step_paths = [] # -- Allow steps", "\"config\": self._config, \"active_outline\": None, \"cleanup_errors\": 0, \"@cleanups\": [], # --", "if attr in frame: return True return False def execute_steps(self,", "files in %r' % base_dir) self.base_dir = base_dir self.path_manager.add(base_dir) if", "%s\" % (name, extra, error_text) print(error_message) self.hook_failures += 1 if", "exception)) traceback.print_exc(file=sys.stdout) # MAYBE: context._dump(pretty=True, prefix=\"Context: \") # -- MARK:", "pending cleanups, registered for this layer. \"\"\" try: self._do_cleanups() finally:", "step_registry self.capture_controller = CaptureController(config) self.context = None self.feature = None", "is a place to store information related to the tests", "try: cleanup_func() except Exception as e: # pylint: disable=broad-except #", "If logging capture is enabled then this attribute contains the", "name: # -- SCENARIO or FEATURE statement = getattr(context, \"scenario\",", "of *behave* as determined by configuration files and command-line options.", "step.run(self._runner, quiet=True, capture=False) if not passed: # -- ISSUE #96:", "= getattr(self, \"text\", None) self.feature.parser.variant = \"steps\" steps = self.feature.parser.parse_steps(steps_text)", "path_getrootdir(path): \"\"\" Extract rootdir from path in a platform independent", "to run behave feature files (or model elements). \"\"\" from", "+ list(extra_step_paths) load_step_modules(step_paths) def feature_locations(self): return collect_feature_locations(self.config.paths) def run(self): with", "\"\"\"Provides a context manager for using the context in BEHAVE", "broad-except cleanups_failed = False self.run_hook(\"after_all\", self.context) try: self.context._do_cleanups() # Without", "# except KeyboardInterrupt: # self.aborted = True # if name", "at the level they are set. You can't delete a", "the captured output as a StringIO instance. 
It is not", "from behave.formatter._registry import make_formatters from behave.runner_util import \\ collect_feature_locations, parse_features,", "\"@layer\": \"testrun\", } self._stack = [d] self._record = {} self._origin", "guaranteed to be \"/\" because Windows. root_dir = path_getrootdir(base_dir) new_base_dir", "step_paths = [steps_dir] + list(extra_step_paths) load_step_modules(step_paths) def feature_locations(self): return collect_feature_locations(self.config.paths)", "the steps dir # NOTE: Default matcher can be overridden", "text string will be parsed and executed in turn just", "behave: * setup paths * loads environment hooks * loads", "stderr is not being captured. A :class:`behave.runner.ContextMaskWarning` warning will be", "%s\" % (cleanup_func_name, exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout) # MAYBE: context._dump(pretty=True, prefix=\"Context:", "for the current scenario. It is present mostly for debugging,", "namespace added and removed automatically. There is a \"root\" namespace", "any :class:`~behave.model.Table` associated with the step. .. attribute:: text This", "captured output as a StringIO instance. It is not present", "statement = None else: # -- CASE: feature, scenario, step", ":param cleanup_func: Callable function :param args: Args for cleanup_func() call", "to reporters. 
# REQUIRED-FOR: Summary to keep track of untested", "attribute '%(attr)s' \" \\ \"originally set in %(function)s (%(filename)s:%(line)s)\" elif", "in steps: passed = step.run(self._runner, quiet=True, capture=False) if not passed:", "is ContextMode.BEHAVE and self._origin[attr] is not ContextMode.BEHAVE: msg = \"behave", "aborted This is set to true when the user aborts", "use the \"in\" operator to test whether a certain value", "context, name=\"foo_42\") \"\"\" # pylint: disable=protected-access try: context._push(layer_name) yield context", "list(extra_step_paths) load_step_modules(step_paths) def feature_locations(self): return collect_feature_locations(self.config.paths) def run(self): with self.path_manager:", "self.config.dry_run and (name in self.hooks): try: with context.use_with_user_mode(): self.hooks[name](context, *args)", "self.config.verbose: if not self.config.paths: print('ERROR: Could not find \"%s\" directory.", "params) stack_limit = 2 if six.PY2: stack_limit += 1 #", "= \"user code is masking context attribute \" \\ \"'%(attr)s';", "self.base_dir = None def setup_paths(self): # pylint: disable=too-many-branches, too-many-statements if", "config): super(Runner, self).__init__(config) self.path_manager = PathManager() self.base_dir = None def", "The attributes of this object are the same as the", "attribute contains the captured logging as an instance of :class:`~behave.log_capture.LoggingCapture`.", "variable. You may use the \"in\" operator to test whether", "statement.error_message: # -- NOTE: One exception/failure is already stored. #", "called when :meth:`Context._pop()` is called. 
This is intended for user-cleanups.", "* USER: Indicates \"user\" mode (in steps, hooks, fixtures, ...)", "self.feature = None # DISABLED: self.rule = None # DISABLED:", "KeyError: raise AttributeError(attr) for frame in self._stack: if attr in", "using \"del\" but only at the level they are set.", "cleanup_funcs = current_layer.get(\"@cleanups\", []) on_cleanup_error = getattr(self, \"on_cleanup_error\", self.print_cleanup_error) context", "substep info to diagnose problem. step_line = u\"%s %s\" %", "self.setup_capture() self.run_hook(\"before_all\", context) run_feature = not self.aborted failed_count = 0", "self.hook_failures += 1 if \"tag\" in name: # -- SCENARIO", "if self.config.verbose: use_traceback = True ExceptionUtil.set_traceback(e) extra = u\"\" if", "subsystem based on the configuration data. \"\"\" # pylint: disable=no-self-use", "whether there is a \"feature\" value in the context. Values", "to distinguish between the two usage modes while using the", "base directory:\", new_base_dir) if os.path.isdir(os.path.join(new_base_dir, steps_dir)): break if os.path.isfile(os.path.join(new_base_dir, environment_file)):", "\"_\": return attr in self.__dict__ for frame in self._stack: if", "self.hook_failures = 0 self.setup_capture() self.run_hook(\"before_all\", context) run_feature = not self.aborted", "filename) if os.path.exists(hooks_path): exec_file(hooks_path, self.hooks) if \"before_all\" not in self.hooks:", "if features is None: features = self.features # -- ENSURE:", "the context stack. HINT: Use layer_name values: \"scenario\", \"feature\", \"testrun\".", "msg = \"user code is masking context attribute \" \\", "WINDOWS: return drive + os.path.sep # -- POSIX: return os.path.sep", "print(\"\\nABORTED: By user.\") for formatter in self.formatters: formatter.close() for reporter", "file. 
If the execute_steps call fails (either through error or", "'%(attr)s' \" \\ \"originally set by behave\" elif self._config.verbose: msg", "super(Runner, self).__init__(config) self.path_manager = PathManager() self.base_dir = None def setup_paths(self):", "NOTE: Default matcher can be overridden in \"environment.py\" hook. steps_dir", "_use_with_behave_mode(self): \"\"\"Provides a context manager for using the context in", "added and removed automatically. There is a \"root\" namespace and", "self.run_hook(\"before_all\", self.context) # -- STEP: Parse all feature files (by", "functions when stack frame is popped. A user can add", "if self.config.verbose: print(\"Trying base directory:\", new_base_dir) if os.path.isdir(os.path.join(new_base_dir, steps_dir)): break", ":class:`~behave.log_capture.LoggingCapture`. It is not present if logging is not being", "attribute:: tags The current set of active tags (as a", "(elements) \"\"\" def __init__(self, config): super(Runner, self).__init__(config) self.path_manager = PathManager()", "and creates model (elements) \"\"\" def __init__(self, config): super(Runner, self).__init__(config)", "Performs any pending cleanups, registered for this layer. \"\"\" try:", "feature to reporters. # REQUIRED-FOR: Summary to keep track of", "usage modes while using the context: * BEHAVE: Indicates \"behave\"", "-- ENSURE: Release other exception frames. six.reraise(*first_cleanup_erro_info) def _push(self, layer_name=None):", "the logging subsystem based on the configuration data. \"\"\" #", "= property(_get_aborted, _set_aborted, doc=\"Indicates that test run is aborted by", "before_all/after_all self.aborted = True statement = None else: # --", "step definitions * select feature files, parses them and creates", "be raised if *behave* overwrites the value. 
If the variable", "attribute '{1}'\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __setattr__(self,", "ExceptionUtil.set_traceback(e) extra = u\"\" if \"tag\" in name: extra =", "% (prefix, level)) if pretty: for name in sorted(frame.keys()): value", "# -- FIRST EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message = error_message def setup_capture(self):", "os.walk(base_dir, followlinks=True): if [fn for fn in filenames if fn.endswith(\".feature\")]:", "model (features). Provides the core functionality of a test runner", "You can't delete a value set by a feature at", "(or model elements). \"\"\" from __future__ import absolute_import, print_function, with_statement", "context, for example: \"feature\" in context checks whether there is", "ISSUE #96: Provide more substep info to diagnose problem. step_line", "is a \"feature\" value in the context. Values may be", "= \"'{0}' object has no attribute '{1}'\" msg = msg.format(self.__class__.__name__,", "print(\"Primary path is to a file so using its directory\")", "needed by model elements. .. attribute:: aborted This is set", "set by a feature at a scenario level but you", "record[1], \"function\": record[3], } self._emit_warning(attr, params) stack_limit = 2 if", "self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode(): for step in steps: passed = step.run(self._runner,", "only at the level they are set. You can't delete", "USER: Indicates \"user\" mode (in steps, hooks, fixtures, ...) \"\"\"", "be overridden in \"environment.py\" hook. steps_dir = os.path.join(self.base_dir, self.config.steps_dir) step_paths", "self._config, \"active_outline\": None, \"cleanup_errors\": 0, \"@cleanups\": [], # -- REQUIRED-BY:", "Stored as derived attribute in :attr:`Context.aborted`. \"\"\" # pylint: disable=too-many-instance-attributes", ".. 
code-block:: with scoped_context_layer(context): the_fixture = use_fixture(foo, context, name=\"foo_42\") \"\"\"", "self._origin[attr] is not ContextMode.USER: msg = \"user code is masking", "base_dir) raise ConfigError(message) base_dir = new_base_dir self.config.base_dir = base_dir for", "runner for a behave model (features). Provides the core functionality", "error_text = ExceptionUtil.describe(e, use_traceback).rstrip() error_message = u\"HOOK-ERROR in %s%s: %s\"", "context._mode is restored. :param context: Context object to use. :param", "Default matcher can be overridden in \"environment.py\" hook. steps_dir =", "were defined in a feature file. If the execute_steps call", "of namespace added and removed automatically. There is a \"root\"", ":class:`~behave.model.Scenario`. It will not be present outside of the scope", "the current scenario. It is present mostly for debugging, but", "is set at the step level and holds any :class:`~behave.model.Table`", "enabled then this attribute contains the captured output as a", "Provide more substep info to diagnose problem. step_line = u\"%s", "_set_aborted(self, value): # pylint: disable=protected-access assert self.context, \"REQUIRE: context, but", "then the step invoking it will need to catch the", "for reporter in self.config.reporters: reporter.feature(feature) # -- AFTER-ALL: # pylint:", "context.execute_steps() works in weird cases (hooks, ...) # self.setup_capture() #", "\"%s\"' % base_dir) raise ConfigError('No feature files in %r' %", "a context variable is being overwritten in some situations. If", "= base_dir for dirpath, dirnames, filenames in os.walk(base_dir, followlinks=True): if", "by *behave* then this will be raised if user code", "def __init__(self, config): super(Runner, self).__init__(config) self.path_manager = PathManager() self.base_dir =", "statement = args[0] if statement: # -- CASE: feature, scenario,", "ValueError, if invoked without a feature context. 
\"\"\" assert isinstance(steps_text,", "\"\"\"Hold contextual information during the running of tests. This object", "# -- USE PYTHON3 BACKPORT: With unicode traceback support. import", "the step invoking it will need to catch the resulting", "that scenario. .. _`configuration file section names`: behave.html#configuration-files \"\"\" #", "root_dir: break new_base_dir = os.path.dirname(new_base_dir) if new_base_dir == root_dir: if", "frame is self.__dict__[\"_root\"]: continue if attr in frame: record =", "a StringIO instance. It is not present if stderr is", "the resulting exceptions. :param steps_text: Text with the Gherkin steps", "to true in the root namespace as soon as a", "None self.table = None # -- RUNTIME SUPPORT: self.stdout_capture =", "reporter.feature(feature) # -- AFTER-ALL: # pylint: disable=protected-access, broad-except cleanups_failed =", "feature files in %r' % base_dir) self.base_dir = base_dir self.path_manager.add(base_dir)", "= None self.log_capture = None self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS @staticmethod def", "assert rootdir == r\"D:\\\" \"\"\" drive, _ = os.path.splitdrive(path) if", "value # @aborted.setter def _set_aborted(self, value): # pylint: disable=protected-access assert", "\"\"\" # pylint: disable=protected-access try: context._push(layer_name) yield context finally: context._pop()", "stderr capture is enabled then this attribute contains the captured", "two context modes. .. sourcecode:: python context = Context() with", "this object are the same as the `configuration file section", "definitions * select feature files, parses them and creates model", "steps executed successfully. :raises: AssertionError, if a step failure occurs.", "= Context(self) self.load_hooks() self.load_step_definitions() # -- ENSURE: context.execute_steps() works in", "reporter in self.config.reporters: reporter.end() failed = ((failed_count > 0) or", "layer_name=None): \"\"\"Push a new layer on the context stack. 
HINT:", "\"cleanup_errors\": 0, \"@cleanups\": [], # -- REQUIRED-BY: before_all() hook \"@layer\":", "def run_hook(self, name, context, *args): if not self.config.dry_run and (name", "def stop_capture(self): self.capture_controller.stop_capture() def teardown_capture(self): self.capture_controller.teardown_capture() def run_model(self, features=None): #", "Could not find any \"<name>.feature\" files. '\\ 'Please specify where", "stdout capture is enabled then this attribute contains the captured", "to use. :param mode: Mode to apply to context object.", "present if stdout is not being captured. .. attribute:: stderr_capture", "try: self.context._do_cleanups() # Without dropping the last context layer. except", "data. \"\"\" # pylint: disable=no-self-use context.config.setup_logging() def load_hooks(self, filename=None): filename", "\"failed\": False, \"config\": self._config, \"active_outline\": None, \"cleanup_errors\": 0, \"@cleanups\": [],", "removed even if cleanup-errors occur. self._stack.pop(0) def _use_with_behave_mode(self): \"\"\"Provides a", "def run_with_paths(self): self.context = Context(self) self.load_hooks() self.load_step_definitions() # -- ENSURE:", "can't delete a value set by a feature at a", "step. .. attribute:: config The configuration of *behave* as determined", "root. This is not guaranteed to be \"/\" because Windows.", "os.path.sep class ModelRunner(object): \"\"\" Test runner for a behave model", "step.name) message = \"%s SUB-STEP: %s\" % \\ (step.status.name.upper(), step_line)", "= True def __init__(self, runner): self._runner = weakref.proxy(runner) self._config =", "None, \"cleanup_errors\": 0, \"@cleanups\": [], # -- REQUIRED-BY: before_all() hook", "context manager for context layer (push/do-something/pop cycle). .. code-block:: with", "print(u\"CLEANUP-ERROR in %s: %s: %s\" % (cleanup_func_name, exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout)", "method by running the model. 
\"\"\" self.context = Context(self) return", "can delete a value set for a scenario in that", ":class:`~behave.model.Tag` which are basically just glorified strings) combined from the", "files (by using their file location). feature_locations = [filename for", "the root namespace as soon as a step fails. Initially:", "raise except Exception as e: # pylint: disable=broad-except # --", "self.__dict__[\"_root\"][attr] = value if attr not in self._origin: self._origin[attr] =", "runner and the functional API needed by model elements. ..", "attempts to overwrite one of these variables, or if *behave*", "level they are set. You can't delete a value set", "pylint: disable=protected-access, broad-except cleanups_failed = False self.run_hook(\"after_all\", self.context) try: self.context._do_cleanups()", "pass def before_all(context): context.on_cleanup_error = handle_cleanup_error context.add_cleanup(cleanup_database, the_database) \"\"\" #", "__init__(self, config): super(Runner, self).__init__(config) self.path_manager = PathManager() self.base_dir = None", "debugging, but may be useful otherwise. .. attribute:: log_capture If", "\"__name__\", None) if not cleanup_func_name: cleanup_func_name = \"%r\" % cleanup_func", "cleanup_func print(u\"CLEANUP-ERROR in %s: %s: %s\" % (cleanup_func_name, exception.__class__.__name__, exception))", "Callable function :param args: Args for cleanup_func() call (optional). :param", "the user.\") def run_hook(self, name, context, *args): if not self.config.dry_run", "steps_dir) else: print('ERROR: Could not find \"%s\" directory in your", "yield finally: # -- RESTORE: Initial current_mode # Even if", "self.config.paths: print('ERROR: Could not find \"%s\" directory. '\\ 'Please specify", "will be raised if user code overwrites the value. 
\"\"\"", "context.failed) return failed def run(self): \"\"\" Implements the run method", "is enabled then this attribute contains the captured logging as", "is set to true in the root namespace when the", "!= os.getcwd(): self.path_manager.add(os.getcwd()) def before_all_default_hook(self, context): \"\"\" Default implementation for", ".. attribute:: stdout_capture If stdout capture is enabled then this", "(in steps, hooks, fixtures, ...) \"\"\" BEHAVE = 1 USER", ":param layer_name: Layer name to use (or None). \"\"\" initial_data", "will be parsed and executed in turn just as though", "test runner and the functional API needed by model elements.", "first_path.filename base_dir = first_path if base_dir.startswith(\"@\"): # -- USE: behave", "specify where to find your features.') else: print('ERROR: Could not", "a feature file if os.path.isfile(base_dir): if self.config.verbose: print(\"Primary path is", "object. \"\"\" # pylint: disable=protected-access assert mode in (ContextMode.BEHAVE, ContextMode.USER)", "user code attempts to overwrite one of these variables, or", "self.hooks: self.hooks[\"before_all\"] = self.before_all_default_hook def load_step_definitions(self, extra_step_paths=None): if extra_step_paths is", "not find any \"<name>.feature\" files. '\\ 'Please specify where to", "\"\"\"The steps identified in the \"steps\" text string will be", "is not being captured. 
A :class:`behave.runner.ContextMaskWarning` warning will be raised", "= True failed_count += 1 run_feature = False # --", "names are used by *behave*; be wary of using them", "feature for formatter in self.formatters: formatter.uri(feature.filename) failed = feature.run(self) if", "reporter in self.config.reporters: reporter.feature(feature) # -- AFTER-ALL: # pylint: disable=protected-access,", "@staticmethod def print_cleanup_error(context, cleanup_func, exception): cleanup_func_name = getattr(cleanup_func, \"__name__\", None)", "context._pop() def path_getrootdir(path): \"\"\" Extract rootdir from path in a", "self.config.verbose: print(\"Primary path is to a file so using its", "so using its directory\") base_dir = os.path.dirname(base_dir) else: if self.config.verbose:", "True return False def execute_steps(self, steps_text): \"\"\"The steps identified in", "args[0] error_text = ExceptionUtil.describe(e, use_traceback).rstrip() error_message = u\"HOOK-ERROR in %s%s:", "based on the configuration data. \"\"\" # pylint: disable=no-self-use context.config.setup_logging()", "attr in self.__dict__ for frame in self._stack: if attr in", ".. _`configuration file section names`: behave.html#configuration-files \"\"\" # pylint: disable=too-many-instance-attributes", "BEHAVE mode.\"\"\" return use_context_with_mode(self, ContextMode.BEHAVE) def use_with_user_mode(self): \"\"\"Provides a context", "of your tests the object will have additional layers of", "It is not present if logging is not being captured.", "true when the user aborts a test run (:exc:`KeyboardInterrupt` exception).", "start_capture(self): self.capture_controller.start_capture() def stop_capture(self): self.capture_controller.stop_capture() def teardown_capture(self): self.capture_controller.teardown_capture() def run_model(self,", "self.run_hook(\"after_all\", self.context) try: self.context._do_cleanups() # Without dropping the last context", "is not guaranteed to be \"/\" because Windows. 
root_dir =", "ENSURE: Release other exception frames. six.reraise(*first_cleanup_erro_info) def _push(self, layer_name=None): \"\"\"Push", "# context._set_root_attribute(\"failed\", True) def _do_cleanups(self): \"\"\"Execute optional cleanup functions when", "= cleanup_func current_frame = self._stack[0] if cleanup_func not in current_frame[\"@cleanups\"]:", "current_mode = context._mode try: context._mode = mode yield finally: #", "CASE: feature, scenario, step statement = args[0] if statement: #", "not in current_frame[\"@cleanups\"]: # -- AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def", "of active tags (as a Python set containing instances of", "cleanup_database(database): pass def handle_cleanup_error(context, cleanup_func, exception): pass def before_all(context): context.on_cleanup_error", "self.aborted = True statement = None else: # -- CASE:", "attr in frame: return frame[attr] msg = \"'{0}' object has", "as a StringIO instance. It is not present if stderr", "running of tests. This object is a place to store", "when we start testing a new feature and holds a", "self.context._set_root_attribute(\"aborted\", bool(value)) aborted = property(_get_aborted, _set_aborted, doc=\"Indicates that test run", "%s: %s\" % (cleanup_func_name, exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout) # MAYBE: context._dump(pretty=True,", "== \"_\": try: return self.__dict__[attr] except KeyError: raise AttributeError(attr) for", "to apply to context object. \"\"\" # pylint: disable=protected-access assert", "% base_dir) raise ConfigError('No feature files in %r' % base_dir)", "before_all() hook \"@layer\": \"testrun\", } self._stack = [d] self._record =", "if not self.feature: raise ValueError(\"execute_steps() called outside of feature\") #", "present mostly for debugging, but may be useful otherwise. 
..", "= self cleanup_errors = [] for cleanup_func in reversed(cleanup_funcs): try:", "\"feature\", \"testrun\". :param layer_name: Layer name to use (or None).", "= mode yield finally: # -- RESTORE: Initial current_mode #", "checks whether there is a \"feature\" value in the context.", "step_line = u\"%s %s\" % (step.keyword, step.name) message = \"%s", "self._stack[0] if cleanup_func not in current_frame[\"@cleanups\"]: # -- AVOID DUPLICATES:", "failed substep):\\n\" message += u\"\".join(traceback.format_tb(step.exc_traceback)) # message += u\"\\nTraceback (of", "execute (as string). :returns: True, if the steps executed successfully.", "and removed automatically. There is a \"root\" namespace and additional", "...) # self.setup_capture() # self.run_hook(\"before_all\", self.context) # -- STEP: Parse", "With unicode traceback support. import traceback2 as traceback else: import", "collect_feature_locations(self.config.paths) def run(self): with self.path_manager: self.setup_paths() return self.run_with_paths() def run_with_paths(self):", "msg = \"\" if self._mode is ContextMode.BEHAVE and self._origin[attr] is", "enum import Enum if six.PY2: # -- USE PYTHON3 BACKPORT:", "for a scenario in that scenario. .. _`configuration file section", "self.context.aborted return value # @aborted.setter def _set_aborted(self, value): # pylint:", "After first failure. run_feature = False except KeyboardInterrupt: self.aborted =", "POSIX: return os.path.sep class ModelRunner(object): \"\"\" Test runner for a", "os.path.sep # -- POSIX: return os.path.sep class ModelRunner(object): \"\"\" Test", "is enabled then this attribute contains the captured output as", "u\"\" if \"tag\" in name: extra = \"(tag=%s)\" % args[0]", "mode in (ContextMode.BEHAVE, ContextMode.USER) current_mode = context._mode try: context._mode =", "context._mode try: context._mode = mode yield finally: # -- RESTORE:", "exception frames. 
six.reraise(*first_cleanup_erro_info) def _push(self, layer_name=None): \"\"\"Push a new layer", "drive, _ = os.path.splitdrive(path) if drive: # -- WINDOWS: return", "import warnings import weakref import six from behave._types import ExceptionUtil", "current set of active tags (as a Python set containing", "[]} if layer_name: initial_data[\"@layer\"] = layer_name self._stack.insert(0, initial_data) def _pop(self):", "\"del\" but only at the level they are set. You", "base_dir = os.path.dirname(file_locations[0].filename) base_dir = os.path.abspath(base_dir) # supplied path might", "(steps_dir, base_dir) raise ConfigError(message) base_dir = new_base_dir self.config.base_dir = base_dir", "This is set to true when the user aborts a", "this method uses .table/.text original_table = getattr(self, \"table\", None) original_text", "your features.') else: print('ERROR: Could not find any \"<name>.feature\" files", "'{1}' at the current level\" msg = msg.format(self.__class__.__name__, attr) raise", "options. The attributes of this object are the same as", "context data for current step. self.table = original_table self.text =", "steps_dir = os.path.join(self.base_dir, self.config.steps_dir) step_paths = [steps_dir] + list(extra_step_paths) load_step_modules(step_paths)", "= [] self.undefined_steps = [] self.step_registry = step_registry self.capture_controller =", "self._record = {} self._origin = {} self._mode = ContextMode.BEHAVE #", "return True return False def execute_steps(self, steps_text): \"\"\"The steps identified", "StringIO instance. It is not present if stderr is not", "data for current step. # Needed if step definition that", "that is active for the current scenario. 
It is present", "msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __contains__(self, attr): if attr[0] ==", "context attribute '%(attr)s' \" \\ \"originally set by behave\" elif", "frame = self._stack[0] if attr in frame: del frame[attr] del", "except KeyboardInterrupt: # self.aborted = True # if name not", "you're running. You may add arbitrary attributes to it of", "be to a feature file if os.path.isfile(base_dir): if self.config.verbose: print(\"Primary", "(%(filename)s:%(line)s)\" elif self._mode is ContextMode.USER: if self._origin[attr] is not ContextMode.USER:", "find \"%s\" directory. '\\ 'Please specify where to find your", "in self.config.reporters: reporter.end() failed = ((failed_count > 0) or self.aborted", "set at the step level and holds any multiline text", "in that scenario. .. _`configuration file section names`: behave.html#configuration-files \"\"\"", "self._origin[attr] = self._mode def _emit_warning(self, attr, params): msg = \"\"", "for frame in self._stack[1:]: if attr in frame: record =", "+= 1 if self.config.stop or self.aborted: # -- FAIL-EARLY: After", "-- REQUIRED-BY: before_all() hook \"@layer\": \"testrun\", } self._stack = [d]", "features.') else: print('ERROR: Could not find any \"<name>.feature\" files '\\", "for cleanup errors. .. code-block:: python # -- FILE: features/environment.py", "os.getcwd(): self.path_manager.add(os.getcwd()) def before_all_default_hook(self, context): \"\"\" Default implementation for :func:`before_all()`", "manager for context layer (push/do-something/pop cycle). .. code-block:: with scoped_context_layer(context):", "in name: # -- SCENARIO or FEATURE statement = getattr(context,", "current_frame[\"@cleanups\"]: # -- AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def use_context_with_mode(context, mode):", "overwrite one of these variables, or if *behave* itself tries", "support. 
import traceback2 as traceback else: import traceback class CleanupError(RuntimeError):", "== r\"D:\\\" \"\"\" drive, _ = os.path.splitdrive(path) if drive: #", "\"environment.py\" hook. steps_dir = os.path.join(self.base_dir, self.config.steps_dir) step_paths = [steps_dir] +", "and holds any :class:`~behave.model.Table` associated with the step. .. attribute::", "they are set. You can't delete a value set by", "to ContextMode.BEHAVE or ContextMode.USER mode. Provides a context manager for", "features/environment.py def cleanup_database(database): pass def handle_cleanup_error(context, cleanup_func, exception): pass def", "-- AFTER-ALL: # pylint: disable=protected-access, broad-except cleanups_failed = False self.run_hook(\"after_all\",", "step invoking it will need to catch the resulting exceptions.", "formatter in self.formatters: formatter.uri(feature.filename) failed = feature.run(self) if failed: failed_count", "might be to a feature file if os.path.isfile(base_dir): if self.config.verbose:", "call (optional). :param kwargs: Kwargs for cleanup_func() call (optional). \"\"\"", "\"\"\" Implements the run method by running the model. \"\"\"", "disable=too-many-branches if not self.context: self.context = Context(self) if self.step_registry is", "is not present if logging is not being captured. ..", "Initially: False. Stored as derived attribute in :attr:`Context.aborted`. \"\"\" #", "names are: .. attribute:: feature This is set when we", "the configuration data. \"\"\" # pylint: disable=no-self-use context.config.setup_logging() def load_hooks(self,", "# -- BEST-EFFORT ALGORITHM: Tries to perform all cleanups. assert", "on the context stack. 
HINT: Use layer_name values: \"scenario\", \"feature\",", "self.capture_controller.stop_capture() def teardown_capture(self): self.capture_controller.teardown_capture() def run_model(self, features=None): # pylint: disable=too-many-branches", "break if os.path.isfile(os.path.join(new_base_dir, environment_file)): break if new_base_dir == root_dir: break", "= 'No %s directory in %r' % (steps_dir, base_dir) raise", "warning will be raised if user code attempts to overwrite", "BETTER: isinstance(first_path, FileLocation): first_path = first_path.filename base_dir = first_path if", "the context in USER mode.\"\"\" return use_context_with_mode(self, ContextMode.USER) def user_mode(self):", "the user aborts a test run (:exc:`KeyboardInterrupt` exception). Initially: False.", "= getattr(context, \"scenario\", context.feature) elif \"all\" in name: # --", "= step_registry self.capture_controller = CaptureController(config) self.context = None self.feature =", "class ModelRunner(object): \"\"\" Test runner for a behave model (features).", "active_outline This is set for each scenario in a scenario", "in your '\\ 'specified path \"%s\"' % (steps_dir, base_dir)) message", "user.\") def run_hook(self, name, context, *args): if not self.config.dry_run and", "\"REQUIRE: Non-empty stack\" current_layer = self._stack[0] cleanup_funcs = current_layer.get(\"@cleanups\", [])", "context, but context=%r\" % self.context self.context._set_root_attribute(\"aborted\", bool(value)) aborted = property(_get_aborted,", "# pylint: disable=protected-access assert self.context, \"REQUIRE: context, but context=%r\" %", "None self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS @staticmethod def ignore_cleanup_error(context, cleanup_func, exception): pass", "def handle_cleanup_error(context, cleanup_func, exception): pass def before_all(context): context.on_cleanup_error = handle_cleanup_error", "# self.aborted = True # if name not in (\"before_all\",", "originally set by user code then this 
will be raised", "self.table = original_table self.text = original_text return True def add_cleanup(self,", "\"\"\" assert isinstance(steps_text, six.text_type), \"Steps must be unicode.\" if not", "not present if logging is not being captured. .. attribute::", "all cleanups. assert self._stack, \"REQUIRE: Non-empty stack\" current_layer = self._stack[0]", "path in self.config.paths)) first_path = self.config.paths[0] if hasattr(first_path, \"filename\"): #", "stop_capture(self): self.capture_controller.stop_capture() def teardown_capture(self): self.capture_controller.teardown_capture() def run_model(self, features=None): # pylint:", "in some situations. If the variable was originally set by", "1 if \"tag\" in name: # -- SCENARIO or FEATURE", "Python set containing instances of :class:`~behave.model.Tag` which are basically just", "= first_path if base_dir.startswith(\"@\"): # -- USE: behave @features.txt base_dir", "set. You can't delete a value set by a feature", "value = frame[name] print(\"%s %-15s = %r\" % (prefix, name,", "context): \"\"\" Default implementation for :func:`before_all()` hook. Setup the logging", "AttributeError(msg) def __contains__(self, attr): if attr[0] == \"_\": return attr", "if the steps executed successfully. :raises: AssertionError, if a step", "mostly for debugging, but may be useful otherwise. .. attribute::", "statement: # -- CASE: feature, scenario, step statement.hook_failed = True", "for dirpath, dirnames, filenames in os.walk(base_dir, followlinks=True): if [fn for", "context) run_feature = not self.aborted failed_count = 0 undefined_steps_initial_size =", "or if *behave* itself tries to overwrite a user-set variable.", "0 self.setup_capture() self.run_hook(\"before_all\", context) run_feature = not self.aborted failed_count =", "in features: if run_feature: try: self.feature = feature for formatter", "\"<name>.feature\" files. 
'\\ 'Please specify where to find your features.')", "(of context.execute_steps()):\" assert False, message # -- FINALLY: Restore original", "new_base_dir = base_dir steps_dir = self.config.steps_dir environment_file = self.config.environment_file while", "ConfigError(message) base_dir = new_base_dir self.config.base_dir = base_dir for dirpath, dirnames,", "== \"/\" WINDOWS-PATH EXAMPLE: rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir ==", "more substep info to diagnose problem. step_line = u\"%s %s\"", "frame in self.__dict__[\"_stack\"]: if frame is self.__dict__[\"_root\"]: continue if attr", "in name: extra = \"(tag=%s)\" % args[0] error_text = ExceptionUtil.describe(e,", "set. These names are: .. attribute:: feature This is set", "exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout) # MAYBE: context._dump(pretty=True, prefix=\"Context: \") # --", "= \"%s SUB-STEP: %s\" % \\ (step.status.name.upper(), step_line) if step.error_message:", "self.config.paths: self.config.paths = [base_dir] if base_dir != os.getcwd(): self.path_manager.add(os.getcwd()) def", "running the model. \"\"\" self.context = Context(self) return self.run_model() class", "self._mode is ContextMode.BEHAVE and self._origin[attr] is not ContextMode.BEHAVE: msg =", "even if cleanup-errors occur. self._stack.pop(0) def _use_with_behave_mode(self): \"\"\"Provides a context", "= features or [] self.hooks = {} self.formatters = []", "their file location). feature_locations = [filename for filename in self.feature_locations()", "additional layers of namespace added and removed automatically. There is", "the running of tests. This object is a place to", "set when we start testing a new feature and holds", "context attribute '%(attr)s' \" \\ \"originally set in %(function)s (%(filename)s:%(line)s)\"", "tutorial for what this means\" if msg: msg = msg", "switching between the two context modes. .. 
sourcecode:: python context", "attribute in :attr:`Context.aborted`. \"\"\" # pylint: disable=too-many-instance-attributes def __init__(self, config,", "is not ContextMode.USER: msg = \"user code is masking context", "if layer_name: initial_data[\"@layer\"] = layer_name self._stack.insert(0, initial_data) def _pop(self): \"\"\"Pop", "provides Runner class to run behave feature files (or model", "for current step. self.table = original_table self.text = original_text return", "You may add arbitrary attributes to it of whatever value", "break if new_base_dir == root_dir: break new_base_dir = os.path.dirname(new_base_dir) if", "not self.config.exclude(filename)] features = parse_features(feature_locations, language=self.config.lang) self.features.extend(features) # -- STEP:", "only error message. statement.error_message += u\"\\n\"+ error_message else: # --", "% (steps_dir, base_dir) raise ConfigError(message) base_dir = new_base_dir self.config.base_dir =", "(name in self.hooks): try: with context.use_with_user_mode(): self.hooks[name](context, *args) # except", "return self.run_model() class Runner(ModelRunner): \"\"\" Standard test runner for behave:", "self.feature_locations() if file_locations: base_dir = os.path.dirname(file_locations[0].filename) base_dir = os.path.abspath(base_dir) #", "} self._emit_warning(attr, params) self.__dict__[\"_root\"][attr] = value if attr not in", "ContextMode.USER) current_mode = context._mode try: context._mode = mode yield finally:", "`configuration file section names`_. .. 
attribute:: active_outline This is set", "Non-empty stack\" current_layer = self._stack[0] cleanup_funcs = current_layer.get(\"@cleanups\", []) on_cleanup_error", "step.error_message message += u\"Traceback (of failed substep):\\n\" message += u\"\".join(traceback.format_tb(step.exc_traceback))", "code is masking context attribute \" \\ \"'%(attr)s'; see the", "path:\", \\ \", \".join('\"%s\"' % path for path in self.config.paths))", "else: # -- FIRST EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message = error_message def", "This is set when we start testing a new feature", "repr(frame)) def __getattr__(self, attr): if attr[0] == \"_\": try: return", "{} self.formatters = [] self.undefined_steps = [] self.step_registry = step_registry", "\"%s\"' % (steps_dir, base_dir)) message = 'No %s directory in", "weird cases (hooks, ...) # self.setup_capture() # self.run_hook(\"before_all\", self.context) #", "= self.config.environment_file while True: if self.config.verbose: print(\"Trying base directory:\", new_base_dir)", "file section names`_. .. attribute:: active_outline This is set for", "= True ExceptionUtil.set_traceback(e) extra = u\"\" if \"tag\" in name:", "Runner class to run behave feature files (or model elements).", "1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func, e) if self.fail_on_cleanup_errors and cleanup_errors: first_cleanup_erro_info", "variable is being overwritten in some situations. If the variable", "before_all_default_hook(self, context): \"\"\" Default implementation for :func:`before_all()` hook. 
Setup the", "_`configuration file section names`: behave.html#configuration-files \"\"\" # pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS", "current_layer.get(\"@cleanups\", []) on_cleanup_error = getattr(self, \"on_cleanup_error\", self.print_cleanup_error) context = self", "[d] self._record = {} self._origin = {} self._mode = ContextMode.BEHAVE", "warnings.warn(\"Use 'use_with_user_mode()' instead\", PendingDeprecationWarning, stacklevel=2) return self.use_with_user_mode() def _set_root_attribute(self, attr,", "STEP: Run all features. stream_openers = self.config.outputs self.formatters = make_formatters(self.config,", "Default implementation for :func:`before_all()` hook. Setup the logging subsystem based", "None: features = self.features # -- ENSURE: context.execute_steps() works in", "True: if self.config.verbose: print(\"Trying base directory:\", new_base_dir) if os.path.isdir(os.path.join(new_base_dir, steps_dir)):", "for cleanup_func in reversed(cleanup_funcs): try: cleanup_func() except Exception as e:", "-- HANDLE HOOK ERRORS: use_traceback = False if self.config.verbose: use_traceback", "steps identified in the \"steps\" text string will be parsed", "stacklevel=3) def _dump(self, pretty=False, prefix=\" \"): for level, frame in", "MAYBE: context._dump(pretty=True, prefix=\"Context: \") # -- MARK: testrun as FAILED", "setup paths * loads environment hooks * loads step definitions", "if new_base_dir == root_dir: break new_base_dir = os.path.dirname(new_base_dir) if new_base_dir", "too-many-statements if self.config.paths: if self.config.verbose: print(\"Supplied path:\", \\ \", \".join('\"%s\"'", "is ContextMode.USER: if self._origin[attr] is not ContextMode.USER: msg = \"user", "message # -- FINALLY: Restore original context data for current", "context.config.setup_logging() def load_hooks(self, filename=None): filename = filename or self.config.environment_file hooks_path", "# -- ENSURE: Release other exception frames. 
six.reraise(*first_cleanup_erro_info) def _push(self,", "if attr in frame: del frame[attr] del self._record[attr] else: msg", "steps to import other stuff from the steps dir #", "is set to true in the root namespace as soon", "the running of your tests the object will have additional", "assert isinstance(steps_text, six.text_type), \"Steps must be unicode.\" if not self.feature:", "a value set by a feature at a scenario level", "self.aborted failed_count = 0 undefined_steps_initial_size = len(self.undefined_steps) for feature in", "example: \"feature\" in context checks whether there is a \"feature\"", "a scenario in that scenario. .. _`configuration file section names`:", "combined from the feature and scenario. This attribute will not", "log_capture If logging capture is enabled then this attribute contains", "and the functional API needed by model elements. .. attribute::", "= original_table self.text = original_text return True def add_cleanup(self, cleanup_func,", "pylint: disable=too-many-branches, too-many-statements if self.config.paths: if self.config.verbose: print(\"Supplied path:\", \\", "Values may be deleted from the context using \"del\" but", "FAILED # context._set_root_attribute(\"failed\", True) def _do_cleanups(self): \"\"\"Execute optional cleanup functions", "self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS @staticmethod def ignore_cleanup_error(context, cleanup_func, exception): pass @staticmethod", "attr in frame: del frame[attr] del self._record[attr] else: msg =", "@aborted.setter def _set_aborted(self, value): # pylint: disable=protected-access assert self.context, \"REQUIRE:", "except Exception as e: # pylint: disable=broad-except # pylint: disable=protected-access", "will not be present outside of a feature (i.e. within", "failure. 
run_feature = False except KeyboardInterrupt: self.aborted = True failed_count", "if [fn for fn in filenames if fn.endswith(\".feature\")]: break else:", "return False def execute_steps(self, steps_text): \"\"\"The steps identified in the", "at the step level and holds any multiline text associated", "in USER mode.\"\"\" return use_context_with_mode(self, ContextMode.USER) def user_mode(self): warnings.warn(\"Use 'use_with_user_mode()'", "a scenario outline and references the :class:`~behave.model.Row` that is active", "the context using \"del\" but only at the level they", "Windows. root_dir = path_getrootdir(base_dir) new_base_dir = base_dir steps_dir = self.config.steps_dir", "error_message else: # -- FIRST EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message = error_message", "= None self.table = None # -- RUNTIME SUPPORT: self.stdout_capture", "= Context(self) if self.step_registry is None: self.step_registry = the_step_registry if", "behave._types import ExceptionUtil from behave.capture import CaptureController from behave.exception import", "any \"<name>.feature\" files '\\ 'in your specified path \"%s\"' %", "with the step. .. attribute:: config The configuration of *behave*", "@staticmethod def ignore_cleanup_error(context, cleanup_func, exception): pass @staticmethod def print_cleanup_error(context, cleanup_func,", "tests you're running. You may add arbitrary attributes to it", "= handle_cleanup_error context.add_cleanup(cleanup_database, the_database) \"\"\" # -- BEST-EFFORT ALGORITHM: Tries", "they were defined in a feature file. If the execute_steps", "is not being captured. .. 
attribute:: stderr_capture If stderr capture", "in turn just as though they were defined in a", "not in self._origin: self._origin[attr] = self._mode def __delattr__(self, attr): frame", "# pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS = True def __init__(self, runner): self._runner", "cleanup_errors: first_cleanup_erro_info = cleanup_errors[0] del cleanup_errors # -- ENSURE: Release", "# -*- coding: UTF-8 -*- \"\"\" This module provides Runner", "after_all). .. attribute:: scenario This is set when we start", "at the step level and holds any :class:`~behave.model.Table` associated with", "current_layer = self._stack[0] cleanup_funcs = current_layer.get(\"@cleanups\", []) on_cleanup_error = getattr(self,", "% (cleanup_func_name, exception.__class__.__name__, exception)) traceback.print_exc(file=sys.stdout) # MAYBE: context._dump(pretty=True, prefix=\"Context: \")", "= \"\" if self._mode is ContextMode.BEHAVE and self._origin[attr] is not", "\"root\" namespace and additional namespaces for features and scenarios. Certain", "self.features # -- ENSURE: context.execute_steps() works in weird cases (hooks,", "current scenario. It is present mostly for debugging, but may", "} self._stack = [d] self._record = {} self._origin = {}", "pass def handle_cleanup_error(context, cleanup_func, exception): pass def before_all(context): context.on_cleanup_error =", "raised if user code attempts to overwrite one of these", "__init__(self, runner): self._runner = weakref.proxy(runner) self._config = runner.config d =", "error_message def setup_capture(self): if not self.context: self.context = Context(self) self.capture_controller.setup_capture(self.context)", "else: if self.config.verbose: if not self.config.paths: print('ERROR: Could not find", "arbitrary attributes to it of whatever value you need. During", "an AssertionError/Exception is raised. 
context._mode = current_mode @contextlib.contextmanager def scoped_context_layer(context,", "= os.path.join(self.base_dir, self.config.steps_dir) step_paths = [steps_dir] + list(extra_step_paths) load_step_modules(step_paths) def", "# @aborted.setter def _set_aborted(self, value): # pylint: disable=protected-access assert self.context,", "frame in enumerate(self._stack): print(\"%sLevel %d\" % (prefix, level)) if pretty:", "== \"_\": return attr in self.__dict__ for frame in self._stack:", "message += \"\\nSubstep info: %s\\n\" % step.error_message message += u\"Traceback", "\"user code is masking context attribute '%(attr)s' \" \\ \"originally", "(as a Python set containing instances of :class:`~behave.model.Tag` which are", "stdout_capture If stdout capture is enabled then this attribute contains", "context manager for using the context in USER mode.\"\"\" return", "= os.path.join(self.base_dir, filename) if os.path.exists(hooks_path): exec_file(hooks_path, self.hooks) if \"before_all\" not", "context. \"\"\" assert isinstance(steps_text, six.text_type), \"Steps must be unicode.\" if", "execute_steps call fails (either through error or failure assertion) then", "%r' % base_dir) self.base_dir = base_dir self.path_manager.add(base_dir) if not self.config.paths:", "any \"<name>.feature\" files. '\\ 'Please specify where to find your", "from behave._types import ExceptionUtil from behave.capture import CaptureController from behave.exception", "return self.use_with_user_mode() def _set_root_attribute(self, attr, value): for frame in self.__dict__[\"_stack\"]:", "these variables, or if *behave* itself tries to overwrite a", "not ContextMode.BEHAVE: msg = \"behave runner is masking context attribute", "it will need to catch the resulting exceptions. :param steps_text:", "the scope of a scenario. .. 
attribute:: tags The current", "them yourself as *behave* may overwrite the value you set.", "tests the object will have additional layers of namespace added", "if logging is not being captured. .. attribute:: stdout_capture If", "\\ steps_dir) else: print('ERROR: Could not find \"%s\" directory in", "name, context, *args): if not self.config.dry_run and (name in self.hooks):", "environment_file)): break if new_base_dir == root_dir: break new_base_dir = os.path.dirname(new_base_dir)", "captured. .. attribute:: stdout_capture If stdout capture is enabled then", "this means\" if msg: msg = msg % params warnings.warn(msg,", "def _set_aborted(self, value): # pylint: disable=protected-access assert self.context, \"REQUIRE: context,", "if self.config.verbose: print(\"Primary path is to a file so using", "print_cleanup_error(context, cleanup_func, exception): cleanup_func_name = getattr(cleanup_func, \"__name__\", None) if not", "in os.walk(base_dir, followlinks=True): if [fn for fn in filenames if", "self._stack.pop(0) def _use_with_behave_mode(self): \"\"\"Provides a context manager for using the", "-- ALWAYS: Report run/not-run feature to reporters. # REQUIRED-FOR: Summary", "context._set_root_attribute(\"failed\", True) def _do_cleanups(self): \"\"\"Execute optional cleanup functions when stack", "if user code attempts to overwrite one of these variables,", "a new layer on the context stack. HINT: Use layer_name", "individual scenarios of a scenario outline) and holds a :class:`~behave.model.Scenario`.", "or [] self.hooks = {} self.formatters = [] self.undefined_steps =", "def run_model(self, features=None): # pylint: disable=too-many-branches if not self.context: self.context", "user.\") for formatter in self.formatters: formatter.close() for reporter in self.config.reporters:", "self.feature = None self.hook_failures = 0 # @property def _get_aborted(self):", "to use (or None). 
\"\"\" initial_data = {\"@cleanups\": []} if", "original_text = getattr(self, \"text\", None) self.feature.parser.variant = \"steps\" steps =", "name: # -- ABORT EXECUTION: For before_all/after_all self.aborted = True", "the context, for example: \"feature\" in context checks whether there", ":class:`~behave.model.Table` associated with the step. .. attribute:: text This is", "= original_text return True def add_cleanup(self, cleanup_func, *args, **kwargs): \"\"\"Adds", "traceback else: import traceback class CleanupError(RuntimeError): pass class ContextMaskWarning(UserWarning): \"\"\"Raised", "strings) combined from the feature and scenario. This attribute will", "untested features. for reporter in self.config.reporters: reporter.feature(feature) # -- AFTER-ALL:", "dirpath, dirnames, filenames in os.walk(base_dir, followlinks=True): if [fn for fn", "be raised if user code overwrites the value. \"\"\" pass", "def _do_cleanups(self): \"\"\"Execute optional cleanup functions when stack frame is", "be present outside of the scope of a scenario. 
..", "record[0], \"line\": record[1], \"function\": record[3], } self._emit_warning(attr, params) self.__dict__[\"_root\"][attr] =", "attribute:: feature This is set when we start testing a", "feature file if os.path.isfile(base_dir): if self.config.verbose: print(\"Primary path is to", "MAYBE: assert callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\" assert self._stack if args or", "current_frame = self._stack[0] if cleanup_func not in current_frame[\"@cleanups\"]: # --", "\"\"\" # pylint: disable=too-many-instance-attributes FAIL_ON_CLEANUP_ERRORS = True def __init__(self, runner):", "# message += u\"\\nTraceback (of context.execute_steps()):\" assert False, message #", "with self.path_manager: self.setup_paths() return self.run_with_paths() def run_with_paths(self): self.context = Context(self)", "(steps_dir, base_dir)) message = 'No %s directory in %r' %", "if self._origin[attr] is not ContextMode.USER: msg = \"user code is", "-- USE PYTHON3 BACKPORT: With unicode traceback support. import traceback2", "'\\ 'in your specified path \"%s\"' % base_dir) raise ConfigError('No", ":param steps_text: Text with the Gherkin steps to execute (as", "# -- POSTCONDITION: Original context._mode is restored. :param context: Context", "This object is a place to store information related to", "attribute:: log_capture If logging capture is enabled then this attribute", "mode.\"\"\" return use_context_with_mode(self, ContextMode.BEHAVE) def use_with_user_mode(self): \"\"\"Provides a context manager", "if *behave* itself tries to overwrite a user-set variable. You", "scope. .. 
attribute:: aborted This is set to true in", "\"%s SUB-STEP: %s\" % \\ (step.status.name.upper(), step_line) if step.error_message: message", "> 0) or (len(self.undefined_steps) > undefined_steps_initial_size) or cleanups_failed) # XXX-MAYBE:", "feature_locations = [filename for filename in self.feature_locations() if not self.config.exclude(filename)]", "ConfigError from behave.formatter._registry import make_formatters from behave.runner_util import \\ collect_feature_locations,", "(features). Provides the core functionality of a test runner and", "traceback.print_exc(file=sys.stdout) # MAYBE: context._dump(pretty=True, prefix=\"Context: \") # -- MARK: testrun", "0) or self.aborted or (self.hook_failures > 0) or (len(self.undefined_steps) >", "\"\"\" def __init__(self, config): super(Runner, self).__init__(config) self.path_manager = PathManager() self.base_dir", "\"function\": record[3], } self._emit_warning(attr, params) stack_limit = 2 if six.PY2:", "step_line) if step.error_message: message += \"\\nSubstep info: %s\\n\" % step.error_message", "assertion) then the step invoking it will need to catch", "step definition that called this method uses .table/.text original_table =", "kwargs: def internal_cleanup_func(): cleanup_func(*args, **kwargs) else: internal_cleanup_func = cleanup_func current_frame", "print('ERROR: Could not find \"%s\" directory in your '\\ 'specified", "-- PREPARE: Save original context data for current step. #", "FINALLY: Restore original context data for current step. self.table =", "the_database) \"\"\" # -- BEST-EFFORT ALGORITHM: Tries to perform all", "if self.step_registry is None: self.step_registry = the_step_registry if features is", "drive + os.path.sep # -- POSIX: return os.path.sep class ModelRunner(object):", "-- POSIX: return os.path.sep class ModelRunner(object): \"\"\" Test runner for", "when stack frame is popped. 
A user can add a", "\"_\": self.__dict__[attr] = value return for frame in self._stack[1:]: if", "(ContextMode.BEHAVE, ContextMode.USER) current_mode = context._mode try: context._mode = mode yield", "os.path.exists(hooks_path): exec_file(hooks_path, self.hooks) if \"before_all\" not in self.hooks: self.hooks[\"before_all\"] =", "if self._mode is ContextMode.BEHAVE and self._origin[attr] is not ContextMode.BEHAVE: msg", "_set_aborted, doc=\"Indicates that test run is aborted by the user.\")", "getattr(self, \"on_cleanup_error\", self.print_cleanup_error) context = self cleanup_errors = [] for", "HANDLE HOOK ERRORS: use_traceback = False if self.config.verbose: use_traceback =", "self.config.steps_dir) step_paths = [steps_dir] + list(extra_step_paths) load_step_modules(step_paths) def feature_locations(self): return", "self._stack.insert(0, initial_data) def _pop(self): \"\"\"Pop the current layer from the", "a place to store information related to the tests you're", "in sorted(frame.keys()): value = frame[name] print(\"%s %-15s = %r\" %", "# -- FINALLY: Restore original context data for current step.", "model elements). \"\"\" from __future__ import absolute_import, print_function, with_statement import", "if extra_step_paths is None: extra_step_paths = [] # -- Allow", "+= \"\\nSubstep info: %s\\n\" % step.error_message message += u\"Traceback (of", "a \"feature\" value in the context. 
Values may be deleted", "directory:\", new_base_dir) if os.path.isdir(os.path.join(new_base_dir, steps_dir)): break if os.path.isfile(os.path.join(new_base_dir, environment_file)): break", "if attr in frame: record = self.__dict__[\"_record\"][attr] params = {", "None self.feature = None self.hook_failures = 0 # @property def", "self.capture_controller = CaptureController(config) self.context = None self.feature = None self.hook_failures", "logging capture is enabled then this attribute contains the captured", "_dump(self, pretty=False, prefix=\" \"): for level, frame in enumerate(self._stack): print(\"%sLevel", "in frame: return True return False def execute_steps(self, steps_text): \"\"\"The", "(optional). \"\"\" # MAYBE: assert callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\" assert self._stack", "a step failure occurs. :raises: ValueError, if invoked without a", "self.context = Context(self) if self.step_registry is None: self.step_registry = the_step_registry", "if a context variable is being overwritten in some situations.", "record = self.__dict__[\"_record\"][attr] params = { \"attr\": attr, \"filename\": record[0],", "whatever value you need. During the running of your tests", "scenario (including the individual scenarios of a scenario outline) and", "You may use the \"in\" operator to test whether a", "cleanup_errors = [] for cleanup_func in reversed(cleanup_funcs): try: cleanup_func() except", "scoped_context_layer(context, layer_name=None): \"\"\"Provides context manager for context layer (push/do-something/pop cycle).", "stack. Performs any pending cleanups, registered for this layer. \"\"\"", "specify where to find your features.' % \\ steps_dir) else:", "is not being captured. .. 
attribute:: stdout_capture If stdout capture", "def __delattr__(self, attr): frame = self._stack[0] if attr in frame:", "= None else: # -- CASE: feature, scenario, step statement", "= 1 USER = 2 class Context(object): \"\"\"Hold contextual information", "by configuration files and command-line options. The attributes of this", "pylint: disable=protected-access try: context._push(layer_name) yield context finally: context._pop() def path_getrootdir(path):", "del frame[attr] del self._record[attr] else: msg = \"'{0}' object has", "pylint: disable=too-many-instance-attributes def __init__(self, config, features=None, step_registry=None): self.config = config", "-- NOTE: One exception/failure is already stored. # Append only", "self.config.paths[0] if hasattr(first_path, \"filename\"): # -- BETTER: isinstance(first_path, FileLocation): first_path", "+= 1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func, e) if self.fail_on_cleanup_errors and cleanup_errors:", "steps_dir = self.config.steps_dir environment_file = self.config.environment_file while True: if self.config.verbose:", "print('ERROR: Could not find \"%s\" directory. '\\ 'Please specify where", "Initially: False. .. attribute:: table This is set at the", "self._root = { \"aborted\": False, \"failed\": False, \"config\": self._config, \"active_outline\":", "self.aborted: # -- FAIL-EARLY: After first failure. run_feature = False", "\"(tag=%s)\" % args[0] error_text = ExceptionUtil.describe(e, use_traceback).rstrip() error_message = u\"HOOK-ERROR", "Use layer_name values: \"scenario\", \"feature\", \"testrun\". :param layer_name: Layer name", "is None: self.step_registry = the_step_registry if features is None: features", "resulting exceptions. :param steps_text: Text with the Gherkin steps to", "a new feature and holds a :class:`~behave.model.Feature`. 
It will not", "level, frame in enumerate(self._stack): print(\"%sLevel %d\" % (prefix, level)) if", "+= 1 if \"tag\" in name: # -- SCENARIO or", "False except KeyboardInterrupt: self.aborted = True failed_count += 1 run_feature", "model elements. .. attribute:: aborted This is set to true", "def use_context_with_mode(context, mode): \"\"\"Switch context to ContextMode.BEHAVE or ContextMode.USER mode.", "object are the same as the `configuration file section names`_.", "the Gherkin steps to execute (as string). :returns: True, if", "self.config.reporters: reporter.feature(feature) # -- AFTER-ALL: # pylint: disable=protected-access, broad-except cleanups_failed", "self.feature.parser.variant = \"steps\" steps = self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode(): for step", "use_fixture(foo, context, name=\"foo_42\") \"\"\" # pylint: disable=protected-access try: context._push(layer_name) yield", "base_dir = os.path.abspath(base_dir) # supplied path might be to a", "(step.keyword, step.name) message = \"%s SUB-STEP: %s\" % \\ (step.status.name.upper(),", "args[0] if statement: # -- CASE: feature, scenario, step statement.hook_failed", "set to true in the root namespace when the user", "elif self._config.verbose: msg = \"user code is masking context attribute", "weakref.proxy(runner) self._config = runner.config d = self._root = { \"aborted\":", "msg: msg = msg % params warnings.warn(msg, ContextMaskWarning, stacklevel=3) def", "%d\" % (prefix, level)) if pretty: for name in sorted(frame.keys()):", "= False # -- ALWAYS: Report run/not-run feature to reporters.", "self.context: self.context = Context(self) if self.step_registry is None: self.step_registry =", "ignore_cleanup_error(context, cleanup_func, exception): pass @staticmethod def print_cleanup_error(context, cleanup_func, exception): cleanup_func_name", "Context(self) self.capture_controller.setup_capture(self.context) def start_capture(self): self.capture_controller.start_capture() 
def stop_capture(self): self.capture_controller.stop_capture() def teardown_capture(self):", "start testing a new scenario (including the individual scenarios of", "as determined by configuration files and command-line options. The attributes", "in self._origin: self._origin[attr] = self._mode def _emit_warning(self, attr, params): msg", "1 USER = 2 class Context(object): \"\"\"Hold contextual information during", "capture is enabled then this attribute contains the captured output", "the core functionality of a test runner and the functional", "filename = filename or self.config.environment_file hooks_path = os.path.join(self.base_dir, filename) if", "for feature in features: if run_feature: try: self.feature = feature", "a feature context. \"\"\" assert isinstance(steps_text, six.text_type), \"Steps must be", "overwrite the value you set. These names are: .. attribute::", "self.table = None # -- RUNTIME SUPPORT: self.stdout_capture = None", "by model elements. .. attribute:: aborted This is set to", "stored. # Append only error message. statement.error_message += u\"\\n\"+ error_message", "def _set_root_attribute(self, attr, value): for frame in self.__dict__[\"_stack\"]: if frame", "# MAYBE: assert callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\" assert self._stack if args", "Could not find \"%s\" directory in your '\\ 'specified path", "% (name, extra, error_text) print(error_message) self.hook_failures += 1 if \"tag\"", "a context manager for using the context in BEHAVE mode.\"\"\"", "traceback support. import traceback2 as traceback else: import traceback class", "use. :param mode: Mode to apply to context object. \"\"\"", "if statement: # -- CASE: feature, scenario, step statement.hook_failed =", "of this object are the same as the `configuration file", "path \"./features\"') base_dir = os.path.abspath(\"features\") # Get the root. 
This", "self.step_registry is None: self.step_registry = the_step_registry if features is None:", "module provides Runner class to run behave feature files (or", "-- Allow steps to import other stuff from the steps", "capture=False) if not passed: # -- ISSUE #96: Provide more", "USER mode.\"\"\" return use_context_with_mode(self, ContextMode.USER) def user_mode(self): warnings.warn(\"Use 'use_with_user_mode()' instead\",", "context manager for using the context in BEHAVE mode.\"\"\" return", "= True statement = None else: # -- CASE: feature,", "operator to test whether a certain value has been set", "is removed even if cleanup-errors occur. self._stack.pop(0) def _use_with_behave_mode(self): \"\"\"Provides", "for :func:`before_all()` hook. Setup the logging subsystem based on the", "cleanups, registered for this layer. \"\"\" try: self._do_cleanups() finally: #", "= msg % params warnings.warn(msg, ContextMaskWarning, stacklevel=3) def _dump(self, pretty=False,", "# -- PREPARE: Save original context data for current step.", "def scoped_context_layer(context, layer_name=None): \"\"\"Provides context manager for context layer (push/do-something/pop", "been set on the context, for example: \"feature\" in context", "# pylint: disable=protected-access context._root[\"cleanup_errors\"] += 1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func, e)", "Test runner for a behave model (features). Provides the core", "\"\"\" Default implementation for :func:`before_all()` hook. Setup the logging subsystem", "and references the :class:`~behave.model.Row` that is active for the current", "# -- ALWAYS: Report run/not-run feature to reporters. # REQUIRED-FOR:", "in %r' % (steps_dir, base_dir) raise ConfigError(message) base_dir = new_base_dir", "= self.context.aborted return value # @aborted.setter def _set_aborted(self, value): #", "self.config.reporters: reporter.end() failed = ((failed_count > 0) or self.aborted or", "Run all features. 
stream_openers = self.config.outputs self.formatters = make_formatters(self.config, stream_openers)", "in self.formatters: formatter.uri(feature.filename) failed = feature.run(self) if failed: failed_count +=", "Original context._mode is restored. :param context: Context object to use.", "\"@cleanups\": [], # -- REQUIRED-BY: before_all() hook \"@layer\": \"testrun\", }", "d = self._root = { \"aborted\": False, \"failed\": False, \"config\":", "a :class:`~behave.model.Feature`. It will not be present outside of a", "what this means\" if msg: msg = msg % params", "using the context in BEHAVE mode.\"\"\" return use_context_with_mode(self, ContextMode.BEHAVE) def", "failed = feature.run(self) if failed: failed_count += 1 if self.config.stop", "# REQUIRED-FOR: Summary to keep track of untested features. for", "CaptureController from behave.exception import ConfigError from behave.formatter._registry import make_formatters from", "= self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode(): for step in steps: passed =", "behave\" elif self._config.verbose: msg = \"user code is masking context", "context finally: context._pop() def path_getrootdir(path): \"\"\" Extract rootdir from path", "if six.PY2: stack_limit += 1 # Due to traceback2 usage.", "print('Using default path \"./features\"') base_dir = os.path.abspath(\"features\") # Get the", "self._origin: self._origin[attr] = self._mode def _emit_warning(self, attr, params): msg =", "\"_\": try: return self.__dict__[attr] except KeyError: raise AttributeError(attr) for frame", "feature and holds a :class:`~behave.model.Feature`. It will not be present", "= {\"@cleanups\": []} if layer_name: initial_data[\"@layer\"] = layer_name self._stack.insert(0, initial_data)", "there is a \"feature\" value in the context. Values may", "# -- REQUIRED-BY: before_all() hook \"@layer\": \"testrun\", } self._stack =", "Restore original context data for current step. 
self.table = original_table", "for frame in self._stack: if attr in frame: return True", "level and holds any :class:`~behave.model.Table` associated with the step. ..", "not self.config.paths: print('ERROR: Could not find \"%s\" directory. '\\ 'Please", "AssertionError, if a step failure occurs. :raises: ValueError, if invoked", "self.__dict__[attr] except KeyError: raise AttributeError(attr) for frame in self._stack: if", "disable=broad-except # pylint: disable=protected-access context._root[\"cleanup_errors\"] += 1 cleanup_errors.append(sys.exc_info()) on_cleanup_error(context, cleanup_func,", "associated with the step. .. attribute:: text This is set", "not present if stdout is not being captured. .. attribute::", "be parsed and executed in turn just as though they", "to true in the root namespace when the user aborts", "through error or failure assertion) then the step invoking it", "tries to overwrite a user-set variable. You may use the", "context in USER mode.\"\"\" return use_context_with_mode(self, ContextMode.USER) def user_mode(self): warnings.warn(\"Use", "= os.path.abspath(\"features\") # Get the root. This is not guaranteed", "hook. Setup the logging subsystem based on the configuration data.", "def _dump(self, pretty=False, prefix=\" \"): for level, frame in enumerate(self._stack):", "config self.features = features or [] self.hooks = {} self.formatters", "'in your specified path \"%s\"' % base_dir) raise ConfigError('No feature", "\") # -- MARK: testrun as FAILED # context._set_root_attribute(\"failed\", True)", "loads step definitions * select feature files, parses them and", "\"in\" operator to test whether a certain value has been", "base_dir = os.path.dirname(base_dir) else: if self.config.verbose: print('Using default path \"./features\"')", "will not be present outside of a feature scope. 
..", "prefix=\" \"): for level, frame in enumerate(self._stack): print(\"%sLevel %d\" %", "them and creates model (elements) \"\"\" def __init__(self, config): super(Runner,", "self.context: value = self.context.aborted return value # @aborted.setter def _set_aborted(self,", "is to a file so using its directory\") base_dir =", ".. attribute:: active_outline This is set for each scenario in", "AttributeError(msg) def __setattr__(self, attr, value): if attr[0] == \"_\": self.__dict__[attr]", "invoking it will need to catch the resulting exceptions. :param", "msg = \"behave runner is masking context attribute '%(attr)s' \"", "function :param args: Args for cleanup_func() call (optional). :param kwargs:", "context modes. .. sourcecode:: python context = Context() with use_context_with_mode(context,", "Certain names are used by *behave*; be wary of using", "initial_data[\"@layer\"] = layer_name self._stack.insert(0, initial_data) def _pop(self): \"\"\"Pop the current", "while using the context: * BEHAVE: Indicates \"behave\" (internal) mode", "_push(self, layer_name=None): \"\"\"Push a new layer on the context stack.", "self.capture_controller.start_capture() def stop_capture(self): self.capture_controller.stop_capture() def teardown_capture(self): self.capture_controller.teardown_capture() def run_model(self, features=None):", "feature at a scenario level but you can delete a", "successfully. :raises: AssertionError, if a step failure occurs. :raises: ValueError,", "= 0 undefined_steps_initial_size = len(self.undefined_steps) for feature in features: if", "finally: # -- RESTORE: Initial current_mode # Even if an", "paths * loads environment hooks * loads step definitions *", "test runner for behave: * setup paths * loads environment", "test run (:exc:`KeyboardInterrupt` exception). Initially: False. .. 
attribute:: failed This", "tags The current set of active tags (as a Python", "\\ collect_feature_locations, parse_features, \\ exec_file, load_step_modules, PathManager from behave.step_registry import", "# supplied path might be to a feature file if", "attribute:: aborted This is set to true when the user", "hasattr(first_path, \"filename\"): # -- BETTER: isinstance(first_path, FileLocation): first_path = first_path.filename", "message = 'No %s directory in %r' % (steps_dir, base_dir)", "feature scope. .. attribute:: aborted This is set to true", "+= u\"\\nTraceback (of context.execute_steps()):\" assert False, message # -- FINALLY:", "# -- ISSUE #96: Provide more substep info to diagnose", "was originally set by *behave* then this will be raised", "self._mode def _emit_warning(self, attr, params): msg = \"\" if self._mode", "AVOID DUPLICATES: current_frame[\"@cleanups\"].append(internal_cleanup_func) @contextlib.contextmanager def use_context_with_mode(context, mode): \"\"\"Switch context to", "-- ENSURE: Layer is removed even if cleanup-errors occur. self._stack.pop(0)", "return attr in self.__dict__ for frame in self._stack: if attr", "... # Do something # -- POSTCONDITION: Original context._mode is", "self.path_manager.add(os.getcwd()) def before_all_default_hook(self, context): \"\"\" Default implementation for :func:`before_all()` hook.", "1 # Due to traceback2 usage. 
stack_frame = traceback.extract_stack(limit=stack_limit)[0] self._record[attr]", "FIRST EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message = error_message def setup_capture(self): if not", "first_path = first_path.filename base_dir = first_path if base_dir.startswith(\"@\"): # --", "= ((failed_count > 0) or self.aborted or (self.hook_failures > 0)", "\"REQUIRES: callable(cleanup_func)\" assert self._stack if args or kwargs: def internal_cleanup_func():", "{} self._origin = {} self._mode = ContextMode.BEHAVE # -- MODEL", "def run(self): with self.path_manager: self.setup_paths() return self.run_with_paths() def run_with_paths(self): self.context", "context data for current step. # Needed if step definition", "-- ABORT EXECUTION: For before_all/after_all self.aborted = True statement =", "+= u\"\\n\"+ error_message else: # -- FIRST EXCEPTION/FAILURE: statement.store_exception_context(e) statement.error_message", "namespace and additional namespaces for features and scenarios. Certain names", "in context checks whether there is a \"feature\" value in", "Implements the run method by running the model. 
\"\"\" self.context", "-- USE: behave @features.txt base_dir = base_dir[1:] file_locations = self.feature_locations()", "name in sorted(frame.keys()): value = frame[name] print(\"%s %-15s = %r\"", "else: internal_cleanup_func = cleanup_func current_frame = self._stack[0] if cleanup_func not", "frame: return frame[attr] msg = \"'{0}' object has no attribute", "cleanups_failed) # XXX-MAYBE: or context.failed) return failed def run(self): \"\"\"", "formatter.close() for reporter in self.config.reporters: reporter.end() failed = ((failed_count >", "pretty: for name in sorted(frame.keys()): value = frame[name] print(\"%s %-15s", "FEATURE statement = getattr(context, \"scenario\", context.feature) elif \"all\" in name:", "None # -- RUNTIME SUPPORT: self.stdout_capture = None self.stderr_capture =", "to overwrite one of these variables, or if *behave* itself", "scenario, step statement.hook_failed = True if statement.error_message: # -- NOTE:", "find any \"<name>.feature\" files. '\\ 'Please specify where to find", "message += u\"\".join(traceback.format_tb(step.exc_traceback)) # message += u\"\\nTraceback (of context.execute_steps()):\" assert", "import make_formatters from behave.runner_util import \\ collect_feature_locations, parse_features, \\ exec_file,", "fn in filenames if fn.endswith(\".feature\")]: break else: if self.config.verbose: if", "= ExceptionUtil.describe(e, use_traceback).rstrip() error_message = u\"HOOK-ERROR in %s%s: %s\" %", "class ContextMaskWarning(UserWarning): \"\"\"Raised if a context variable is being overwritten", "== root_dir: if self.config.verbose: if not self.config.paths: print('ERROR: Could not", "try: return self.__dict__[attr] except KeyError: raise AttributeError(attr) for frame in", "self.features = features or [] self.hooks = {} self.formatters =", "usage. 
stack_frame = traceback.extract_stack(limit=stack_limit)[0] self._record[attr] = stack_frame frame = self._stack[0]", "\"\"\" # pylint: disable=no-self-use context.config.setup_logging() def load_hooks(self, filename=None): filename =", "self.context self.hook_failures = 0 self.setup_capture() self.run_hook(\"before_all\", context) run_feature = not", "cleanup_func, *args, **kwargs): \"\"\"Adds a cleanup function that is called", "def __init__(self, config, features=None, step_registry=None): self.config = config self.features =", "It is present mostly for debugging, but may be useful", "self.context: self.context = Context(self) self.capture_controller.setup_capture(self.context) def start_capture(self): self.capture_controller.start_capture() def stop_capture(self):", "self.step_registry = the_step_registry if features is None: features = self.features", "attr): if attr[0] == \"_\": try: return self.__dict__[attr] except KeyError:", "__init__(self, config, features=None, step_registry=None): self.config = config self.features = features", "\"\"\"Used to distinguish between the two usage modes while using", "a feature at a scenario level but you can delete", "self.run_model() class Runner(ModelRunner): \"\"\" Standard test runner for behave: *", "class CleanupError(RuntimeError): pass class ContextMaskWarning(UserWarning): \"\"\"Raised if a context variable", "= \"user code is masking context attribute '%(attr)s' \" \\", "self.__dict__[\"_stack\"]: if frame is self.__dict__[\"_root\"]: continue if attr in frame:", "*behave* may overwrite the value you set. 
These names are:", "exception): pass def before_all(context): context.on_cleanup_error = handle_cleanup_error context.add_cleanup(cleanup_database, the_database) \"\"\"", "self.capture_controller.setup_capture(self.context) def start_capture(self): self.capture_controller.start_capture() def stop_capture(self): self.capture_controller.stop_capture() def teardown_capture(self): self.capture_controller.teardown_capture()", "an instance of :class:`~behave.log_capture.LoggingCapture`. It is not present if logging", "features. for reporter in self.config.reporters: reporter.feature(feature) # -- AFTER-ALL: #", "will not be present outside of the scope of a", "%s%s: %s\" % (name, extra, error_text) print(error_message) self.hook_failures += 1", "self.step_registry = step_registry self.capture_controller = CaptureController(config) self.context = None self.feature", "isinstance(first_path, FileLocation): first_path = first_path.filename base_dir = first_path if base_dir.startswith(\"@\"):", "self.hooks[\"before_all\"] = self.before_all_default_hook def load_step_definitions(self, extra_step_paths=None): if extra_step_paths is None:", "not be present outside of a feature scope. .. attribute::", "level\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __contains__(self, attr):", "scenario, step statement = args[0] if statement: # -- CASE:", "self.__dict__ for frame in self._stack: if attr in frame: return", "for level, frame in enumerate(self._stack): print(\"%sLevel %d\" % (prefix, level))", "method uses .table/.text original_table = getattr(self, \"table\", None) original_text =", "in %r' % base_dir) self.base_dir = base_dir self.path_manager.add(base_dir) if not", "def path_getrootdir(path): \"\"\" Extract rootdir from path in a platform", "= True # if name not in (\"before_all\", \"after_all\"): #", "the functional API needed by model elements. .. attribute:: aborted", ".. 
attribute:: table This is set at the step level", "uses .table/.text original_table = getattr(self, \"table\", None) original_text = getattr(self,", "# Append only error message. statement.error_message += u\"\\n\"+ error_message else:", "configuration files and command-line options. The attributes of this object", "StringIO instance. It is not present if stdout is not", "\"originally set in %(function)s (%(filename)s:%(line)s)\" elif self._mode is ContextMode.USER: if", "\"filename\"): # -- BETTER: isinstance(first_path, FileLocation): first_path = first_path.filename base_dir", "to true when the user aborts a test run (:exc:`KeyboardInterrupt`", "= filename or self.config.environment_file hooks_path = os.path.join(self.base_dir, filename) if os.path.exists(hooks_path):", "mode): \"\"\"Switch context to ContextMode.BEHAVE or ContextMode.USER mode. Provides a", "modes while using the context: * BEHAVE: Indicates \"behave\" (internal)", "attribute '{1}' at the current level\" msg = msg.format(self.__class__.__name__, attr)", "step. # Needed if step definition that called this method", "to store information related to the tests you're running. You", "any pending cleanups, registered for this layer. \"\"\" try: self._do_cleanups()", "handle_cleanup_error context.add_cleanup(cleanup_database, the_database) \"\"\" # -- BEST-EFFORT ALGORITHM: Tries to", "= path_getrootdir(\"/foo/bar/one.feature\") assert rootdir == \"/\" WINDOWS-PATH EXAMPLE: rootdir =", "step statement.hook_failed = True if statement.error_message: # -- NOTE: One", "from enum import Enum if six.PY2: # -- USE PYTHON3", "-- FILE: features/environment.py def cleanup_database(database): pass def handle_cleanup_error(context, cleanup_func, exception):", "will be raised if user code attempts to overwrite one", "call (optional). 
\"\"\" # MAYBE: assert callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\" assert", "= getattr(cleanup_func, \"__name__\", None) if not cleanup_func_name: cleanup_func_name = \"%r\"", "self._emit_warning(attr, params) self.__dict__[\"_root\"][attr] = value if attr not in self._origin:", "unicode.\" if not self.feature: raise ValueError(\"execute_steps() called outside of feature\")", "stack_frame frame = self._stack[0] frame[attr] = value if attr not", "tags (as a Python set containing instances of :class:`~behave.model.Tag` which", "while True: if self.config.verbose: print(\"Trying base directory:\", new_base_dir) if os.path.isdir(os.path.join(new_base_dir,", "\"\"\"Provides context manager for context layer (push/do-something/pop cycle). .. code-block::", "import traceback class CleanupError(RuntimeError): pass class ContextMaskWarning(UserWarning): \"\"\"Raised if a", "setup_capture(self): if not self.context: self.context = Context(self) self.capture_controller.setup_capture(self.context) def start_capture(self):", "self.__dict__[attr] = value return for frame in self._stack[1:]: if attr", "a scenario outline) and holds a :class:`~behave.model.Scenario`. It will not", "context.add_cleanup(cleanup_database, the_database) \"\"\" # -- BEST-EFFORT ALGORITHM: Tries to perform", "to execute (as string). :returns: True, if the steps executed", "command-line options. The attributes of this object are the same", "test run is aborted by the user.\") def run_hook(self, name,", "set when we start testing a new scenario (including the", "self._stack[1:]: if attr in frame: record = self._record[attr] params =", "run_model(self, features=None): # pylint: disable=too-many-branches if not self.context: self.context =", "for cleanup_func() call (optional). \"\"\" # MAYBE: assert callable(cleanup_func), \"REQUIRES:", "between the two context modes. .. sourcecode:: python context =", "step. 
self.table = original_table self.text = original_text return True def", "originally set by *behave* then this will be raised if", "scope of a scenario. .. attribute:: tags The current set", "assert callable(cleanup_func), \"REQUIRES: callable(cleanup_func)\" assert self._stack if args or kwargs:", "the run method by running the model. \"\"\" self.context =", "context.execute_steps()):\" assert False, message # -- FINALLY: Restore original context", "being captured. .. attribute:: stderr_capture If stderr capture is enabled", "= the_step_registry if features is None: features = self.features #", "\"\"\"Provides a context manager for using the context in USER", "if not self.config.exclude(filename)] features = parse_features(feature_locations, language=self.config.lang) self.features.extend(features) # --", "# -- STEP: Parse all feature files (by using their", "# if name not in (\"before_all\", \"after_all\"): # raise except", "self._stack: if attr in frame: return True return False def", "as though they were defined in a feature file. If", "if an AssertionError/Exception is raised. context._mode = current_mode @contextlib.contextmanager def", "as derived attribute in :attr:`Context.aborted`. \"\"\" # pylint: disable=too-many-instance-attributes def", "and holds any multiline text associated with the step. 
..", "initial_data) def _pop(self): \"\"\"Pop the current layer from the context", "in frame: record = self.__dict__[\"_record\"][attr] params = { \"attr\": attr,", "for formatter in self.formatters: formatter.close() for reporter in self.config.reporters: reporter.end()", "creates model (elements) \"\"\" def __init__(self, config): super(Runner, self).__init__(config) self.path_manager", "__getattr__(self, attr): if attr[0] == \"_\": try: return self.__dict__[attr] except", "\\ (step.status.name.upper(), step_line) if step.error_message: message += \"\\nSubstep info: %s\\n\"", "yield context finally: context._pop() def path_getrootdir(path): \"\"\" Extract rootdir from", "followlinks=True): if [fn for fn in filenames if fn.endswith(\".feature\")]: break", "# -- BETTER: isinstance(first_path, FileLocation): first_path = first_path.filename base_dir =", "feature in features: if run_feature: try: self.feature = feature for", "if \"tag\" in name: # -- SCENARIO or FEATURE statement", "extra_step_paths=None): if extra_step_paths is None: extra_step_paths = [] # --", "masking context attribute \" \\ \"'%(attr)s'; see the tutorial for", "runner.config d = self._root = { \"aborted\": False, \"failed\": False,", "user-set variable. You may use the \"in\" operator to test", "def _push(self, layer_name=None): \"\"\"Push a new layer on the context", "behave @features.txt base_dir = base_dir[1:] file_locations = self.feature_locations() if file_locations:", "if not self.config.dry_run and (name in self.hooks): try: with context.use_with_user_mode():", "context._dump(pretty=True, prefix=\"Context: \") # -- MARK: testrun as FAILED #", "error_text) print(error_message) self.hook_failures += 1 if \"tag\" in name: #", ".. 
attribute:: scenario This is set when we start testing", "behave.capture import CaptureController from behave.exception import ConfigError from behave.formatter._registry import", "if file_locations: base_dir = os.path.dirname(file_locations[0].filename) base_dir = os.path.abspath(base_dir) # supplied", "in the root namespace as soon as a step fails.", "to a file so using its directory\") base_dir = os.path.dirname(base_dir)", "DISABLED: self.scenario = None self.text = None self.table = None", "self._record[attr] else: msg = \"'{0}' object has no attribute '{1}'", "a feature scope. .. attribute:: aborted This is set to", "weakref import six from behave._types import ExceptionUtil from behave.capture import", "outside of a feature scope. .. attribute:: aborted This is", "basically just glorified strings) combined from the feature and scenario.", "restored. :param context: Context object to use. :param mode: Mode", "a test runner and the functional API needed by model", "feature and scenario. 
This attribute will not be present outside", "reversed(cleanup_funcs): try: cleanup_func() except Exception as e: # pylint: disable=broad-except", "in self.hooks: self.hooks[\"before_all\"] = self.before_all_default_hook def load_step_definitions(self, extra_step_paths=None): if extra_step_paths", "attr not in self._origin: self._origin[attr] = self._mode def __delattr__(self, attr):", "1 if self.config.stop or self.aborted: # -- FAIL-EARLY: After first", "prefix=\"Context: \") # -- MARK: testrun as FAILED # context._set_root_attribute(\"failed\",", "is masking context attribute \" \\ \"'%(attr)s'; see the tutorial", "in self.formatters: formatter.close() for reporter in self.config.reporters: reporter.end() failed =", "original_table self.text = original_text return True def add_cleanup(self, cleanup_func, *args,", "find \"%s\" directory in your '\\ 'specified path \"%s\"' %", "distinguish between the two usage modes while using the context:", "attribute:: active_outline This is set for each scenario in a", "if not passed: # -- ISSUE #96: Provide more substep", "attribute will not be present outside of a feature scope.", "traceback2 as traceback else: import traceback class CleanupError(RuntimeError): pass class", "Context() with use_context_with_mode(context, ContextMode.BEHAVE): ... # Do something # --", "= value return for frame in self._stack[1:]: if attr in", "self.__dict__[\"_record\"][attr] params = { \"attr\": attr, \"filename\": record[0], \"line\": record[1],", "then this attribute contains the captured output as a StringIO", "False, \"config\": self._config, \"active_outline\": None, \"cleanup_errors\": 0, \"@cleanups\": [], #", "contextual information during the running of tests. 
This object is", "running of your tests the object will have additional layers", "rootdir = path_getrootdir(\"/foo/bar/one.feature\") assert rootdir == \"/\" WINDOWS-PATH EXAMPLE: rootdir", "WINDOWS-PATH EXAMPLE: rootdir = path_getrootdir(\"D:\\\\foo\\\\bar\\\\one.feature\") assert rootdir == r\"D:\\\" \"\"\"", "from behave.capture import CaptureController from behave.exception import ConfigError from behave.formatter._registry", "first_path if base_dir.startswith(\"@\"): # -- USE: behave @features.txt base_dir =", "unicode traceback support. import traceback2 as traceback else: import traceback", "means\" if msg: msg = msg % params warnings.warn(msg, ContextMaskWarning,", "apply to context object. \"\"\" # pylint: disable=protected-access assert mode", "user aborts a test run (:exc:`KeyboardInterrupt` exception). Initially: False. Stored", "self.config.environment_file while True: if self.config.verbose: print(\"Trying base directory:\", new_base_dir) if", "attr[0] == \"_\": try: return self.__dict__[attr] except KeyError: raise AttributeError(attr)", "Enum if six.PY2: # -- USE PYTHON3 BACKPORT: With unicode", "BACKPORT: With unicode traceback support. import traceback2 as traceback else:", "the variable was originally set by *behave* then this will", "ContextMaskWarning, stacklevel=3) def _dump(self, pretty=False, prefix=\" \"): for level, frame", "base_dir) raise ConfigError('No feature files in %r' % base_dir) self.base_dir", "of feature\") # -- PREPARE: Save original context data for", "scenarios. Certain names are used by *behave*; be wary of", "code is masking context attribute '%(attr)s' \" \\ \"originally set", "return drive + os.path.sep # -- POSIX: return os.path.sep class", "self.use_with_user_mode() def _set_root_attribute(self, attr, value): for frame in self.__dict__[\"_stack\"]: if", "(hooks, ...) 
# self.setup_capture() # self.run_hook(\"before_all\", self.context) # -- STEP:", "= [base_dir] if base_dir != os.getcwd(): self.path_manager.add(os.getcwd()) def before_all_default_hook(self, context):", "class ContextMode(Enum): \"\"\"Used to distinguish between the two usage modes", "the captured logging as an instance of :class:`~behave.log_capture.LoggingCapture`. It is", "False if self.config.verbose: use_traceback = True ExceptionUtil.set_traceback(e) extra = u\"\"", "just as though they were defined in a feature file.", "statement = getattr(context, \"scenario\", context.feature) elif \"all\" in name: #", "within the scope of the environment before_all and after_all). ..", "def _get_aborted(self): value = False if self.context: value = self.context.aborted", "when the user aborts a test run (:exc:`KeyboardInterrupt` exception). Initially:", "= not self.aborted failed_count = 0 undefined_steps_initial_size = len(self.undefined_steps) for", "[] # -- Allow steps to import other stuff from", "namespaces for features and scenarios. Certain names are used by", "\"behave runner is masking context attribute '%(attr)s' \" \\ \"originally", "%s directory in %r' % (steps_dir, base_dir) raise ConfigError(message) base_dir", "return self.run_with_paths() def run_with_paths(self): self.context = Context(self) self.load_hooks() self.load_step_definitions() #", "# -- STEP: Run all features. stream_openers = self.config.outputs self.formatters", "if self.aborted: print(\"\\nABORTED: By user.\") for formatter in self.formatters: formatter.close()", "a value set for a scenario in that scenario. ..", "use_traceback = False if self.config.verbose: use_traceback = True ExceptionUtil.set_traceback(e) extra", "functional API needed by model elements. .. attribute:: aborted This", "...) \"\"\" BEHAVE = 1 USER = 2 class Context(object):", ".. 
attribute:: failed This is set to true in the", "= feature.run(self) if failed: failed_count += 1 if self.config.stop or", "params = { \"attr\": attr, \"filename\": record[0], \"line\": record[1], \"function\":", "\"testrun\", } self._stack = [d] self._record = {} self._origin =", "and (name in self.hooks): try: with context.use_with_user_mode(): self.hooks[name](context, *args) #", "disable=too-many-branches, too-many-statements if self.config.paths: if self.config.verbose: print(\"Supplied path:\", \\ \",", "If the execute_steps call fails (either through error or failure", "attr in frame: record = self._record[attr] params = { \"attr\":", "parsed and executed in turn just as though they were", "feature files (or model elements). \"\"\" from __future__ import absolute_import,", "step_registry=None): self.config = config self.features = features or [] self.hooks", "self.features.extend(features) # -- STEP: Run all features. stream_openers = self.config.outputs", "= self._stack[0] if cleanup_func not in current_frame[\"@cleanups\"]: # -- AVOID", "def _emit_warning(self, attr, params): msg = \"\" if self._mode is", "attr, params): msg = \"\" if self._mode is ContextMode.BEHAVE and", "-- MARK: testrun as FAILED # context._set_root_attribute(\"failed\", True) def _do_cleanups(self):", "# self.setup_capture() # self.run_hook(\"before_all\", self.context) # -- STEP: Parse all", "overridden in \"environment.py\" hook. 
steps_dir = os.path.join(self.base_dir, self.config.steps_dir) step_paths =", "or FEATURE statement = getattr(context, \"scenario\", context.feature) elif \"all\" in", "= base_dir self.path_manager.add(base_dir) if not self.config.paths: self.config.paths = [base_dir] if", "% args[0] error_text = ExceptionUtil.describe(e, use_traceback).rstrip() error_message = u\"HOOK-ERROR in", "which are basically just glorified strings) combined from the feature", "print('ERROR: Could not find any \"<name>.feature\" files '\\ 'in your", "context = self cleanup_errors = [] for cleanup_func in reversed(cleanup_funcs):", "frame in self._stack[1:]: if attr in frame: record = self._record[attr]", "cleanup_func_name = \"%r\" % cleanup_func print(u\"CLEANUP-ERROR in %s: %s: %s\"", "del self._record[attr] else: msg = \"'{0}' object has no attribute", "= current_mode @contextlib.contextmanager def scoped_context_layer(context, layer_name=None): \"\"\"Provides context manager for", "If stdout capture is enabled then this attribute contains the", "frame: record = self.__dict__[\"_record\"][attr] params = { \"attr\": attr, \"filename\":", "be deleted from the context using \"del\" but only at", "Context(object): \"\"\"Hold contextual information during the running of tests. This", "as FAILED # context._set_root_attribute(\"failed\", True) def _do_cleanups(self): \"\"\"Execute optional cleanup", "ValueError(\"execute_steps() called outside of feature\") # -- PREPARE: Save original", "self.text = original_text return True def add_cleanup(self, cleanup_func, *args, **kwargs):", "intended for user-cleanups. :param cleanup_func: Callable function :param args: Args", "with use_context_with_mode(context, ContextMode.BEHAVE): ... 
# Do something # -- POSTCONDITION:", "root_dir: if self.config.verbose: if not self.config.paths: print('ERROR: Could not find", "if \"tag\" in name: extra = \"(tag=%s)\" % args[0] error_text", "frame: record = self._record[attr] params = { \"attr\": attr, \"filename\":", "step in steps: passed = step.run(self._runner, quiet=True, capture=False) if not", "as e: # pylint: disable=broad-except # pylint: disable=protected-access context._root[\"cleanup_errors\"] +=", "hook \"@layer\": \"testrun\", } self._stack = [d] self._record = {}", "removed automatically. There is a \"root\" namespace and additional namespaces", "a test run (:exc:`KeyboardInterrupt` exception). Initially: False. .. attribute:: failed", "\"<name>.feature\" files '\\ 'in your specified path \"%s\"' % base_dir)", "#96: Provide more substep info to diagnose problem. step_line =", "\"user\" mode (in steps, hooks, fixtures, ...) \"\"\" BEHAVE =", "extra_step_paths = [] # -- Allow steps to import other", "if run_feature: try: self.feature = feature for formatter in self.formatters:", "def run(self): \"\"\" Implements the run method by running the", "the context. Values may be deleted from the context using", "These names are: .. attribute:: feature This is set when", "-- FINALLY: Restore original context data for current step. self.table", "to be \"/\" because Windows. root_dir = path_getrootdir(base_dir) new_base_dir =", "+= 1 # Due to traceback2 usage. stack_frame = traceback.extract_stack(limit=stack_limit)[0]", "the :class:`~behave.model.Row` that is active for the current scenario. It", "python # -- FILE: features/environment.py def cleanup_database(database): pass def handle_cleanup_error(context,", "self.context self.context._set_root_attribute(\"aborted\", bool(value)) aborted = property(_get_aborted, _set_aborted, doc=\"Indicates that test", "not present if stderr is not being captured. 
A :class:`behave.runner.ContextMaskWarning`", "features=None): # pylint: disable=too-many-branches if not self.context: self.context = Context(self)", "self.config.exclude(filename)] features = parse_features(feature_locations, language=self.config.lang) self.features.extend(features) # -- STEP: Run", "[]) on_cleanup_error = getattr(self, \"on_cleanup_error\", self.print_cleanup_error) context = self cleanup_errors", "the \"in\" operator to test whether a certain value has", "self.context = Context(self) return self.run_model() class Runner(ModelRunner): \"\"\" Standard test", "see the tutorial for what this means\" if msg: msg", "path in a platform independent way. POSIX-PATH EXAMPLE: rootdir =", "def teardown_capture(self): self.capture_controller.teardown_capture() def run_model(self, features=None): # pylint: disable=too-many-branches if", "for using the context in USER mode.\"\"\" return use_context_with_mode(self, ContextMode.USER)", "by behave\" elif self._config.verbose: msg = \"user code is masking", "self.path_manager: self.setup_paths() return self.run_with_paths() def run_with_paths(self): self.context = Context(self) self.load_hooks()", "cleanup_func, exception): cleanup_func_name = getattr(cleanup_func, \"__name__\", None) if not cleanup_func_name:", "are: .. attribute:: feature This is set when we start", "aborts a test run (:exc:`KeyboardInterrupt` exception). Initially: False. Stored as", "for example: \"feature\" in context checks whether there is a", "run_feature = not self.aborted failed_count = 0 undefined_steps_initial_size = len(self.undefined_steps)", "environment hooks * loads step definitions * select feature files,", "if attr in frame: return frame[attr] msg = \"'{0}' object", "def load_step_definitions(self, extra_step_paths=None): if extra_step_paths is None: extra_step_paths = []", "soon as a step fails. Initially: False. .. 
attribute:: table", "= base_dir[1:] file_locations = self.feature_locations() if file_locations: base_dir = os.path.dirname(file_locations[0].filename)", "one of these variables, or if *behave* itself tries to", "def setup_capture(self): if not self.context: self.context = Context(self) self.capture_controller.setup_capture(self.context) def", "failed: failed_count += 1 if self.config.stop or self.aborted: # --", "using the context: * BEHAVE: Indicates \"behave\" (internal) mode *", "variables, or if *behave* itself tries to overwrite a user-set", "def user_mode(self): warnings.warn(\"Use 'use_with_user_mode()' instead\", PendingDeprecationWarning, stacklevel=2) return self.use_with_user_mode() def", "@property def _get_aborted(self): value = False if self.context: value =", "not self.feature: raise ValueError(\"execute_steps() called outside of feature\") # --", ".. attribute:: aborted This is set to true in the", "in %(function)s (%(filename)s:%(line)s)\" elif self._mode is ContextMode.USER: if self._origin[attr] is", "context=%r\" % self.context self.context._set_root_attribute(\"aborted\", bool(value)) aborted = property(_get_aborted, _set_aborted, doc=\"Indicates", "= parse_features(feature_locations, language=self.config.lang) self.features.extend(features) # -- STEP: Run all features.", "not cleanup_func_name: cleanup_func_name = \"%r\" % cleanup_func print(u\"CLEANUP-ERROR in %s:", "def internal_cleanup_func(): cleanup_func(*args, **kwargs) else: internal_cleanup_func = cleanup_func current_frame =", "files and command-line options. The attributes of this object are", "cleanup_func_name = getattr(cleanup_func, \"__name__\", None) if not cleanup_func_name: cleanup_func_name =", "info: %s\\n\" % step.error_message message += u\"Traceback (of failed substep):\\n\"", "info to diagnose problem. 
step_line = u\"%s %s\" % (step.keyword,", "or context.failed) return failed def run(self): \"\"\" Implements the run", "if base_dir != os.getcwd(): self.path_manager.add(os.getcwd()) def before_all_default_hook(self, context): \"\"\" Default", "= step.run(self._runner, quiet=True, capture=False) if not passed: # -- ISSUE", "context, *args): if not self.config.dry_run and (name in self.hooks): try:", "= cleanup_errors[0] del cleanup_errors # -- ENSURE: Release other exception", "will be raised if *behave* overwrites the value. If the", "value in the context. Values may be deleted from the", "track of untested features. for reporter in self.config.reporters: reporter.feature(feature) #", "or self.aborted or (self.hook_failures > 0) or (len(self.undefined_steps) > undefined_steps_initial_size)", "os.path.dirname(new_base_dir) if new_base_dir == root_dir: if self.config.verbose: if not self.config.paths:", "your features.' % \\ steps_dir) else: print('ERROR: Could not find", "frame is popped. A user can add a user-specified handler", "(\"before_all\", \"after_all\"): # raise except Exception as e: # pylint:", "scenario This is set when we start testing a new", "RUNTIME SUPPORT: self.stdout_capture = None self.stderr_capture = None self.log_capture =", "absolute_import, print_function, with_statement import contextlib import os.path import sys import", "use_context_with_mode(self, ContextMode.BEHAVE) def use_with_user_mode(self): \"\"\"Provides a context manager for using", "optional cleanup functions when stack frame is popped. A user", "for frame in self._stack: if attr in frame: return frame[attr]", "directory\") base_dir = os.path.dirname(base_dir) else: if self.config.verbose: print('Using default path", "base_dir = os.path.abspath(\"features\") # Get the root. This is not", "of a feature (i.e. within the scope of the environment", "you can delete a value set for a scenario in", "and command-line options. 
The attributes of this object are the", "+= u\"\".join(traceback.format_tb(step.exc_traceback)) # message += u\"\\nTraceback (of context.execute_steps()):\" assert False,", "ENSURE: Layer is removed even if cleanup-errors occur. self._stack.pop(0) def", "passed = step.run(self._runner, quiet=True, capture=False) if not passed: # --", "pass class ContextMode(Enum): \"\"\"Used to distinguish between the two usage", "be present outside of a feature scope. .. attribute:: aborted", "= 0 self.setup_capture() self.run_hook(\"before_all\", context) run_feature = not self.aborted failed_count", "HINT: Use layer_name values: \"scenario\", \"feature\", \"testrun\". :param layer_name: Layer", "user code then this will be raised if *behave* overwrites", "def __init__(self, runner): self._runner = weakref.proxy(runner) self._config = runner.config d", "-- POSTCONDITION: Original context._mode is restored. :param context: Context object", "Setup the logging subsystem based on the configuration data. \"\"\"", "POSTCONDITION: Original context._mode is restored. :param context: Context object to", "self.scenario = None self.text = None self.table = None #", "self.aborted = True # if name not in (\"before_all\", \"after_all\"):", "and scenarios. Certain names are used by *behave*; be wary", "DISABLED: self.rule = None # DISABLED: self.scenario = None self.text", "assert self._stack, \"REQUIRE: Non-empty stack\" current_layer = self._stack[0] cleanup_funcs =", "the \"steps\" text string will be parsed and executed in", "self.config.paths = [base_dir] if base_dir != os.getcwd(): self.path_manager.add(os.getcwd()) def before_all_default_hook(self,", "u\"HOOK-ERROR in %s%s: %s\" % (name, extra, error_text) print(error_message) self.hook_failures", "attributes of this object are the same as the `configuration", "step failure occurs. :raises: ValueError, if invoked without a feature", "REFERENCES/SUPPORT: self.feature = None # DISABLED: self.rule = None #", "layer from the context stack. 
Performs any pending cleanups, registered", "\"\"\" Test runner for a behave model (features). Provides the", "-- ENSURE: context.execute_steps() works in weird cases (hooks, ...) #", "@features.txt base_dir = base_dir[1:] file_locations = self.feature_locations() if file_locations: base_dir", "else: print('ERROR: Could not find any \"<name>.feature\" files '\\ 'in", "print(\"%s %-15s = %r\" % (prefix, name, value)) else: print(prefix", "Context object to use. :param mode: Mode to apply to", "def use_with_user_mode(self): \"\"\"Provides a context manager for using the context", "manager for using the context in USER mode.\"\"\" return use_context_with_mode(self,", "\"attr\": attr, \"filename\": record[0], \"line\": record[1], \"function\": record[3], } self._emit_warning(attr,", "self.feature: raise ValueError(\"execute_steps() called outside of feature\") # -- PREPARE:", "formatter.uri(feature.filename) failed = feature.run(self) if failed: failed_count += 1 if", "information related to the tests you're running. You may add", "def load_hooks(self, filename=None): filename = filename or self.config.environment_file hooks_path =", "**kwargs) else: internal_cleanup_func = cleanup_func current_frame = self._stack[0] if cleanup_func", "undefined_steps_initial_size = len(self.undefined_steps) for feature in features: if run_feature: try:", "but may be useful otherwise. .. attribute:: log_capture If logging", "steps dir # NOTE: Default matcher can be overridden in", "not be present outside of a feature (i.e. within the", "None def setup_paths(self): # pylint: disable=too-many-branches, too-many-statements if self.config.paths: if", "warnings import weakref import six from behave._types import ExceptionUtil from", "value): if attr[0] == \"_\": self.__dict__[attr] = value return for", "-- FAIL-EARLY: After first failure. run_feature = False except KeyboardInterrupt:", "is popped. 
A user can add a user-specified handler for", "functionality of a test runner and the functional API needed", "KeyboardInterrupt: # self.aborted = True # if name not in", "store information related to the tests you're running. You may", "import weakref import six from behave._types import ExceptionUtil from behave.capture", "ALGORITHM: Tries to perform all cleanups. assert self._stack, \"REQUIRE: Non-empty", "import other stuff from the steps dir # NOTE: Default", "if self.config.verbose: if not self.config.paths: print('ERROR: Could not find any", "\"filename\": record[0], \"line\": record[1], \"function\": record[3], } self._emit_warning(attr, params) self.__dict__[\"_root\"][attr]", "for path in self.config.paths)) first_path = self.config.paths[0] if hasattr(first_path, \"filename\"):", "attribute:: table This is set at the step level and", "* select feature files, parses them and creates model (elements)", ":param mode: Mode to apply to context object. \"\"\" #", "None: extra_step_paths = [] # -- Allow steps to import", "...) context = self.context self.hook_failures = 0 self.setup_capture() self.run_hook(\"before_all\", context)", "references the :class:`~behave.model.Row` that is active for the current scenario.", "PREPARE: Save original context data for current step. # Needed", "of a scenario outline) and holds a :class:`~behave.model.Scenario`. It will", "hooks, fixtures, ...) \"\"\" BEHAVE = 1 USER = 2", "cleanup_errors # -- ENSURE: Release other exception frames. six.reraise(*first_cleanup_erro_info) def", "def add_cleanup(self, cleanup_func, *args, **kwargs): \"\"\"Adds a cleanup function that", "for using the context in BEHAVE mode.\"\"\" return use_context_with_mode(self, ContextMode.BEHAVE)", "default path \"./features\"') base_dir = os.path.abspath(\"features\") # Get the root.", "handler for cleanup errors. .. code-block:: python # -- FILE:", "works in weird cases (hooks, ...) 
context = self.context self.hook_failures", "not self.config.dry_run and (name in self.hooks): try: with context.use_with_user_mode(): self.hooks[name](context,", "the steps executed successfully. :raises: AssertionError, if a step failure", "If the variable was originally set by *behave* then this", "\"'{0}' object has no attribute '{1}'\" msg = msg.format(self.__class__.__name__, attr)", "_do_cleanups(self): \"\"\"Execute optional cleanup functions when stack frame is popped.", "for behave: * setup paths * loads environment hooks *", "with self._use_with_behave_mode(): for step in steps: passed = step.run(self._runner, quiet=True,", "*behave* overwrites the value. If the variable was originally set", "getattr(cleanup_func, \"__name__\", None) if not cleanup_func_name: cleanup_func_name = \"%r\" %", "= self.features # -- ENSURE: context.execute_steps() works in weird cases", "in self.__dict__[\"_stack\"]: if frame is self.__dict__[\"_root\"]: continue if attr in", "for user-cleanups. :param cleanup_func: Callable function :param args: Args for", "__future__ import absolute_import, print_function, with_statement import contextlib import os.path import", "# pylint: disable=too-many-instance-attributes def __init__(self, config, features=None, step_registry=None): self.config =", "for reporter in self.config.reporters: reporter.end() failed = ((failed_count > 0)", "by running the model. \"\"\" self.context = Context(self) return self.run_model()", "2 class Context(object): \"\"\"Hold contextual information during the running of", "features is None: features = self.features # -- ENSURE: context.execute_steps()", "\"text\", None) self.feature.parser.variant = \"steps\" steps = self.feature.parser.parse_steps(steps_text) with self._use_with_behave_mode():", "in self.config.paths)) first_path = self.config.paths[0] if hasattr(first_path, \"filename\"): # --", "are basically just glorified strings) combined from the feature and", "the value. 
If the variable was originally set by *behave*", "exceptions. :param steps_text: Text with the Gherkin steps to execute", "a user-set variable. You may use the \"in\" operator to", "\\ \"originally set by behave\" elif self._config.verbose: msg = \"user", "by the user.\") def run_hook(self, name, context, *args): if not", "if self.context: value = self.context.aborted return value # @aborted.setter def", "[steps_dir] + list(extra_step_paths) load_step_modules(step_paths) def feature_locations(self): return collect_feature_locations(self.config.paths) def run(self):", "record = self._record[attr] params = { \"attr\": attr, \"filename\": record[0],", "-- BEST-EFFORT ALGORITHM: Tries to perform all cleanups. assert self._stack,", "if step definition that called this method uses .table/.text original_table", "except KeyboardInterrupt: self.aborted = True failed_count += 1 run_feature =", "self.path_manager.add(base_dir) if not self.config.paths: self.config.paths = [base_dir] if base_dir !=", "all feature files (by using their file location). feature_locations =", "cleanups_failed = False self.run_hook(\"after_all\", self.context) try: self.context._do_cleanups() # Without dropping", "undefined_steps_initial_size) or cleanups_failed) # XXX-MAYBE: or context.failed) return failed def", "is set when we start testing a new feature and", "section names`_. .. attribute:: active_outline This is set for each", "if user code overwrites the value. \"\"\" pass class ContextMode(Enum):", "= {} self.formatters = [] self.undefined_steps = [] self.step_registry =", "= False if self.context: value = self.context.aborted return value #", "not in self._origin: self._origin[attr] = self._mode def _emit_warning(self, attr, params):", "for filename in self.feature_locations() if not self.config.exclude(filename)] features = parse_features(feature_locations,", "context manager for switching between the two context modes. ..", "works in weird cases (hooks, ...) 
# self.setup_capture() # self.run_hook(\"before_all\",", "'{1}'\" msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __setattr__(self, attr,", "value): # pylint: disable=protected-access assert self.context, \"REQUIRE: context, but context=%r\"", "new_base_dir = os.path.dirname(new_base_dir) if new_base_dir == root_dir: if self.config.verbose: if", "object has no attribute '{1}' at the current level\" msg", "This is set to true in the root namespace when", "self.hooks) if \"before_all\" not in self.hooks: self.hooks[\"before_all\"] = self.before_all_default_hook def", "attr in frame: record = self.__dict__[\"_record\"][attr] params = { \"attr\":", "SUB-STEP: %s\" % \\ (step.status.name.upper(), step_line) if step.error_message: message +=", "self._stack[0] frame[attr] = value if attr not in self._origin: self._origin[attr]", "present if stderr is not being captured. A :class:`behave.runner.ContextMaskWarning` warning", "ContextMode.USER) def user_mode(self): warnings.warn(\"Use 'use_with_user_mode()' instead\", PendingDeprecationWarning, stacklevel=2) return self.use_with_user_mode()", "try: self.feature = feature for formatter in self.formatters: formatter.uri(feature.filename) failed", "need. During the running of your tests the object will", "yourself as *behave* may overwrite the value you set. These", "overwrites the value. If the variable was originally set by", "automatically. There is a \"root\" namespace and additional namespaces for", "run (:exc:`KeyboardInterrupt` exception). Initially: False. .. attribute:: failed This is", "run_feature = False # -- ALWAYS: Report run/not-run feature to", "msg = msg.format(self.__class__.__name__, attr) raise AttributeError(msg) def __contains__(self, attr): if", "attr[0] == \"_\": return attr in self.__dict__ for frame in", "not being captured. .. 
attribute:: stderr_capture If stderr capture is", "__setattr__(self, attr, value): if attr[0] == \"_\": self.__dict__[attr] = value", "are used by *behave*; be wary of using them yourself", "context stack. Performs any pending cleanups, registered for this layer.", "FAIL_ON_CLEANUP_ERRORS = True def __init__(self, runner): self._runner = weakref.proxy(runner) self._config", "namespace as soon as a step fails. Initially: False. .." ]
[ "datetime import datetime tileCount = 20 def setup(): global savePDF,", "(colorLeft == color(0)): colorLeft = color(323, 100, 77) else: colorLeft", "import random from datetime import datetime tileCount = 20 def", "if savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth() noFill() strokeCap(actStrokeCap) random.seed(actRandomSeed) for", "global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight savePDF =", "strokeWeight(mouseX/20) stroke(colorLeft, alphaLeft) line(posX, posY, posX+width/tileCount, posY+height/tileCount) elif (toggle ==", "colorLeft = color(0) elif (key == '5'): if (colorRight ==", "100): alphaRight = 50 else: alphaRight = 100 if (key", "(savePDF): savePDF = False endRecord() def mousePressed(): global savePDF, actStrokeCap,", "posY, posX+width/tileCount, posY+height/tileCount) elif (toggle == 1): strokeWeight(mouseY/20) stroke(colorRight, alphaRight)", "strokeWeight(mouseY/20) stroke(colorRight, alphaRight) line(posX, posY+width/tileCount, posX+height/tileCount, posY) if (savePDF): savePDF", "actStrokeCap = ROUND colorLeft = color(0) colorRight = color(0) alphaLeft", "actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255)", "100 elif (key == '7'): if (alphaRight == 100): alphaRight", "actStrokeCap = ROUND actRandomSeed = 0 colorLeft = color(197, 0,", "endRecord() def mousePressed(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft,", "123) colorRight = color(87, 35, 129) alphaLeft = 100 alphaRight", "posX+width/tileCount, posY+height/tileCount) elif (toggle == 1): strokeWeight(mouseY/20) stroke(colorRight, alphaRight) line(posX,", "(key == '0'): actStrokeCap = ROUND colorLeft = color(0) colorRight", "add_library('pdf') import random from datetime import datetime tileCount = 20", "range(tileCount): posX = int(width/tileCount*gridX) posY = 
int(height/tileCount*gridY) toggle = random.randint(0,1)", "== 1): strokeWeight(mouseY/20) stroke(colorRight, alphaRight) line(posX, posY+width/tileCount, posX+height/tileCount, posY) if", "\"2\": actStrokeCap = SQUARE elif key == \"3\": actStrokeCap =", "gridX in range(tileCount): posX = int(width/tileCount*gridX) posY = int(height/tileCount*gridY) toggle", "\"1\": actStrokeCap = ROUND elif key == \"2\": actStrokeCap =", "(key == '5'): if (colorRight == color(0)): colorRight = color(273,", "100): alphaLeft = 50 else: alphaLeft = 100 elif (key", "tileCount = 20 def setup(): global savePDF, actStrokeCap, actRandomSeed, colorLeft,", "alphaLeft, alphaRight if (key=='s' or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p' or", "= color(323, 100, 77) else: colorLeft = color(0) elif (key", "actRandomSeed = 0 colorLeft = color(197, 0, 123) colorRight =", "actStrokeCap = ROUND elif key == \"2\": actStrokeCap = SQUARE", "= ROUND actRandomSeed = 0 colorLeft = color(197, 0, 123)", "key == \"2\": actStrokeCap = SQUARE elif key == \"3\":", "int(width/tileCount*gridX) posY = int(height/tileCount*gridY) toggle = random.randint(0,1) if (toggle ==", "= int(width/tileCount*gridX) posY = int(height/tileCount*gridY) toggle = random.randint(0,1) if (toggle", "if (key=='p' or key=='P'): savePDF = True if key ==", "random.seed(actRandomSeed) for gridY in range(tileCount): for gridX in range(tileCount): posX", "alphaLeft = 100 alphaRight = 100 def draw(): global savePDF,", "= random.randint(0, 100000) def keyReleased(): global savePDF, actStrokeCap, actRandomSeed, colorLeft,", "alphaRight if (key=='s' or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p' or key=='P'):", "ROUND elif key == \"2\": actStrokeCap = SQUARE elif key", "= SQUARE elif key == \"3\": actStrokeCap = PROJECT elif", "key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p' or key=='P'): savePDF = True if", "== color(0)): 
colorLeft = color(323, 100, 77) else: colorLeft =", "global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if savePDF:", "alphaLeft, alphaRight savePDF = False actStrokeCap = ROUND actRandomSeed =", "savePDF = False actStrokeCap = ROUND actRandomSeed = 0 colorLeft", "51) else: colorRight = color(0) elif (key == '6'): if", "== '4'): if (colorLeft == color(0)): colorLeft = color(323, 100,", "line(posX, posY, posX+width/tileCount, posY+height/tileCount) elif (toggle == 1): strokeWeight(mouseY/20) stroke(colorRight,", "if (alphaLeft == 100): alphaLeft = 50 else: alphaLeft =", "colorLeft = color(0) colorRight = color(0) alphaLeft = 100 alphaRight", "keyReleased(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if", "False actStrokeCap = ROUND actRandomSeed = 0 colorLeft = color(197,", "elif (key == '5'): if (colorRight == color(0)): colorRight =", "key=='P'): savePDF = True if key == \"1\": actStrokeCap =", "elif (toggle == 1): strokeWeight(mouseY/20) stroke(colorRight, alphaRight) line(posX, posY+width/tileCount, posX+height/tileCount,", "== 100): alphaRight = 50 else: alphaRight = 100 if", "savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if savePDF: beginRecord(PDF,", "global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if (key=='s'", "(alphaLeft == 100): alphaLeft = 50 else: alphaLeft = 100", "savePDF = False endRecord() def mousePressed(): global savePDF, actStrokeCap, actRandomSeed,", "100 if (key == '0'): actStrokeCap = ROUND colorLeft =", "from datetime import datetime tileCount = 20 def setup(): global", "int(height/tileCount*gridY) toggle = random.randint(0,1) if (toggle == 0): strokeWeight(mouseX/20) stroke(colorLeft,", "colorLeft, colorRight, alphaLeft, alphaRight actRandomSeed = random.randint(0, 100000) def keyReleased():", "elif (key == '7'): if (alphaRight == 100): alphaRight =", "0): 
strokeWeight(mouseX/20) stroke(colorLeft, alphaLeft) line(posX, posY, posX+width/tileCount, posY+height/tileCount) elif (toggle", "colorLeft, colorRight, alphaLeft, alphaRight savePDF = False actStrokeCap = ROUND", "mousePressed(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight actRandomSeed", "else: alphaRight = 100 if (key == '0'): actStrokeCap =", "100, 77) else: colorLeft = color(0) elif (key == '5'):", "'7'): if (alphaRight == 100): alphaRight = 50 else: alphaRight", "if key == \"1\": actStrokeCap = ROUND elif key ==", "background(255) smooth() noFill() strokeCap(actStrokeCap) random.seed(actRandomSeed) for gridY in range(tileCount): for", "(key == '4'): if (colorLeft == color(0)): colorLeft = color(323,", "1): strokeWeight(mouseY/20) stroke(colorRight, alphaRight) line(posX, posY+width/tileCount, posX+height/tileCount, posY) if (savePDF):", "stroke(colorRight, alphaRight) line(posX, posY+width/tileCount, posX+height/tileCount, posY) if (savePDF): savePDF =", "colorRight, alphaLeft, alphaRight savePDF = False actStrokeCap = ROUND actRandomSeed", "= color(0) elif (key == '5'): if (colorRight == color(0)):", "actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\")", "alphaLeft, alphaRight if savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth() noFill() strokeCap(actStrokeCap)", "global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight actRandomSeed =", "strokeCap(actStrokeCap) random.seed(actRandomSeed) for gridY in range(tileCount): for gridX in range(tileCount):", "(toggle == 1): strokeWeight(mouseY/20) stroke(colorRight, alphaRight) line(posX, posY+width/tileCount, posX+height/tileCount, posY)", "'4'): if (colorLeft == color(0)): colorLeft = color(323, 100, 77)", "= random.randint(0,1) if (toggle == 0): strokeWeight(mouseX/20) 
stroke(colorLeft, alphaLeft) line(posX,", "= 50 else: alphaLeft = 100 elif (key == '7'):", "alphaLeft = 100 elif (key == '7'): if (alphaRight ==", "def setup(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight", "color(87, 35, 129) alphaLeft = 100 alphaRight = 100 def", "alphaRight = 100 def draw(): global savePDF, actStrokeCap, actRandomSeed, colorLeft,", "alphaRight actRandomSeed = random.randint(0, 100000) def keyReleased(): global savePDF, actStrokeCap,", "key == \"1\": actStrokeCap = ROUND elif key == \"2\":", "alphaLeft = 50 else: alphaLeft = 100 elif (key ==", "= False actStrokeCap = ROUND actRandomSeed = 0 colorLeft =", "= 100 def draw(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight,", "= int(height/tileCount*gridY) toggle = random.randint(0,1) if (toggle == 0): strokeWeight(mouseX/20)", "import datetime tileCount = 20 def setup(): global savePDF, actStrokeCap,", "or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p' or key=='P'): savePDF = True", "saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p' or key=='P'): savePDF = True if key", "alphaRight if savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth() noFill() strokeCap(actStrokeCap) random.seed(actRandomSeed)", "beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth() noFill() strokeCap(actStrokeCap) random.seed(actRandomSeed) for gridY in", "alphaRight) line(posX, posY+width/tileCount, posX+height/tileCount, posY) if (savePDF): savePDF = False", "color(323, 100, 77) else: colorLeft = color(0) elif (key ==", "= color(0) elif (key == '6'): if (alphaLeft == 100):", "129) alphaLeft = 100 alphaRight = 100 def draw(): global", "alphaRight savePDF = False actStrokeCap = ROUND actRandomSeed = 0", "stroke(colorLeft, alphaLeft) line(posX, posY, posX+width/tileCount, posY+height/tileCount) elif (toggle == 
1):", "= 100 alphaRight = 100 def draw(): global savePDF, actStrokeCap,", "posX = int(width/tileCount*gridX) posY = int(height/tileCount*gridY) toggle = random.randint(0,1) if", "alphaLeft, alphaRight actRandomSeed = random.randint(0, 100000) def keyReleased(): global savePDF,", "== color(0)): colorRight = color(273, 73, 51) else: colorRight =", "for gridX in range(tileCount): posX = int(width/tileCount*gridX) posY = int(height/tileCount*gridY)", "colorLeft = color(323, 100, 77) else: colorLeft = color(0) elif", "20 def setup(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft,", "posY+height/tileCount) elif (toggle == 1): strokeWeight(mouseY/20) stroke(colorRight, alphaRight) line(posX, posY+width/tileCount,", "= 50 else: alphaRight = 100 if (key == '0'):", "smooth() noFill() strokeCap(actStrokeCap) random.seed(actRandomSeed) for gridY in range(tileCount): for gridX", "== \"2\": actStrokeCap = SQUARE elif key == \"3\": actStrokeCap", "range(tileCount): for gridX in range(tileCount): posX = int(width/tileCount*gridX) posY =", "False endRecord() def mousePressed(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight,", "else: alphaLeft = 100 elif (key == '7'): if (alphaRight", "'0'): actStrokeCap = ROUND colorLeft = color(0) colorRight = color(0)", "in range(tileCount): posX = int(width/tileCount*gridX) posY = int(height/tileCount*gridY) toggle =", "50 else: alphaLeft = 100 elif (key == '7'): if", "50 else: alphaRight = 100 if (key == '0'): actStrokeCap", "alphaRight = 50 else: alphaRight = 100 if (key ==", "elif (key == '4'): if (colorLeft == color(0)): colorLeft =", "== '5'): if (colorRight == color(0)): colorRight = color(273, 73,", "gridY in range(tileCount): for gridX in range(tileCount): posX = int(width/tileCount*gridX)", "= False endRecord() def mousePressed(): global savePDF, actStrokeCap, actRandomSeed, colorLeft,", "35, 129) alphaLeft = 100 alphaRight = 100 def draw():", "SQUARE elif key == \"3\": actStrokeCap = 
PROJECT elif (key", "elif key == \"3\": actStrokeCap = PROJECT elif (key ==", "color(0)): colorRight = color(273, 73, 51) else: colorRight = color(0)", "ROUND colorLeft = color(0) colorRight = color(0) alphaLeft = 100", "actStrokeCap = SQUARE elif key == \"3\": actStrokeCap = PROJECT", "\"3\": actStrokeCap = PROJECT elif (key == '4'): if (colorLeft", "random from datetime import datetime tileCount = 20 def setup():", "(key=='p' or key=='P'): savePDF = True if key == \"1\":", "0, 123) colorRight = color(87, 35, 129) alphaLeft = 100", "posX+height/tileCount, posY) if (savePDF): savePDF = False endRecord() def mousePressed():", "if (key == '0'): actStrokeCap = ROUND colorLeft = color(0)", "actStrokeCap = PROJECT elif (key == '4'): if (colorLeft ==", "= ROUND elif key == \"2\": actStrokeCap = SQUARE elif", "colorRight, alphaLeft, alphaRight if savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth() noFill()", "else: colorRight = color(0) elif (key == '6'): if (alphaLeft", "colorLeft, colorRight, alphaLeft, alphaRight if savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth()", "100000) def keyReleased(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft,", "'5'): if (colorRight == color(0)): colorRight = color(273, 73, 51)", "if (key=='s' or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p' or key=='P'): savePDF", "color(273, 73, 51) else: colorRight = color(0) elif (key ==", "(alphaRight == 100): alphaRight = 50 else: alphaRight = 100", "== '0'): actStrokeCap = ROUND colorLeft = color(0) colorRight =", "random.randint(0, 100000) def keyReleased(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight,", "posY) if (savePDF): savePDF = False endRecord() def mousePressed(): global", "datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth() noFill() strokeCap(actStrokeCap) 
random.seed(actRandomSeed) for gridY in range(tileCount):", "ROUND actRandomSeed = 0 colorLeft = color(197, 0, 123) colorRight", "= 100 elif (key == '7'): if (alphaRight == 100):", "== '6'): if (alphaLeft == 100): alphaLeft = 50 else:", "colorRight = color(0) elif (key == '6'): if (alphaLeft ==", "= color(87, 35, 129) alphaLeft = 100 alphaRight = 100", "random.randint(0,1) if (toggle == 0): strokeWeight(mouseX/20) stroke(colorLeft, alphaLeft) line(posX, posY,", "if (alphaRight == 100): alphaRight = 50 else: alphaRight =", "= 100 if (key == '0'): actStrokeCap = ROUND colorLeft", "datetime tileCount = 20 def setup(): global savePDF, actStrokeCap, actRandomSeed,", "actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight actRandomSeed = random.randint(0, 100000)", "savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight actRandomSeed = random.randint(0,", "colorLeft = color(197, 0, 123) colorRight = color(87, 35, 129)", "elif key == \"2\": actStrokeCap = SQUARE elif key ==", "elif (key == '6'): if (alphaLeft == 100): alphaLeft =", "color(0)): colorLeft = color(323, 100, 77) else: colorLeft = color(0)", "colorRight, alphaLeft, alphaRight actRandomSeed = random.randint(0, 100000) def keyReleased(): global", "'6'): if (alphaLeft == 100): alphaLeft = 50 else: alphaLeft", "if (colorLeft == color(0)): colorLeft = color(323, 100, 77) else:", "= color(0) colorRight = color(0) alphaLeft = 100 alphaRight =", "alphaRight = 100 if (key == '0'): actStrokeCap = ROUND", "= 0 colorLeft = color(197, 0, 123) colorRight = color(87,", "savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if (key=='s' or", "colorLeft, colorRight, alphaLeft, alphaRight if (key=='s' or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if", "colorRight, alphaLeft, alphaRight if (key=='s' or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p'", "(key == '7'): if (alphaRight == 
100): alphaRight = 50", "if (savePDF): savePDF = False endRecord() def mousePressed(): global savePDF,", "77) else: colorLeft = color(0) elif (key == '5'): if", "color(0) elif (key == '6'): if (alphaLeft == 100): alphaLeft", "100 def draw(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft,", "for gridY in range(tileCount): for gridX in range(tileCount): posX =", "== \"1\": actStrokeCap = ROUND elif key == \"2\": actStrokeCap", "savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight savePDF = False", "True if key == \"1\": actStrokeCap = ROUND elif key", "= ROUND colorLeft = color(0) colorRight = color(0) alphaLeft =", "noFill() strokeCap(actStrokeCap) random.seed(actRandomSeed) for gridY in range(tileCount): for gridX in", "line(posX, posY+width/tileCount, posX+height/tileCount, posY) if (savePDF): savePDF = False endRecord()", "actRandomSeed = random.randint(0, 100000) def keyReleased(): global savePDF, actStrokeCap, actRandomSeed,", "if (colorRight == color(0)): colorRight = color(273, 73, 51) else:", "= 20 def setup(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight,", "if (toggle == 0): strokeWeight(mouseX/20) stroke(colorLeft, alphaLeft) line(posX, posY, posX+width/tileCount,", "== '7'): if (alphaRight == 100): alphaRight = 50 else:", "color(0) elif (key == '5'): if (colorRight == color(0)): colorRight", "== 0): strokeWeight(mouseX/20) stroke(colorLeft, alphaLeft) line(posX, posY, posX+width/tileCount, posY+height/tileCount) elif", "setup(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight savePDF", "colorRight = color(87, 35, 129) alphaLeft = 100 alphaRight =", "== 100): alphaLeft = 50 else: alphaLeft = 100 elif", "draw(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if", "color(0) colorRight = color(0) alphaLeft = 100 alphaRight = 100", "PROJECT elif (key == '4'): if (colorLeft == color(0)): 
colorLeft", "100 alphaRight = 100 def draw(): global savePDF, actStrokeCap, actRandomSeed,", "colorRight = color(273, 73, 51) else: colorRight = color(0) elif", "savePDF = True if key == \"1\": actStrokeCap = ROUND", "(key == '6'): if (alphaLeft == 100): alphaLeft = 50", "(toggle == 0): strokeWeight(mouseX/20) stroke(colorLeft, alphaLeft) line(posX, posY, posX+width/tileCount, posY+height/tileCount)", "color(197, 0, 123) colorRight = color(87, 35, 129) alphaLeft =", "posY = int(height/tileCount*gridY) toggle = random.randint(0,1) if (toggle == 0):", "(colorRight == color(0)): colorRight = color(273, 73, 51) else: colorRight", "73, 51) else: colorRight = color(0) elif (key == '6'):", "or key=='P'): savePDF = True if key == \"1\": actStrokeCap", "def mousePressed(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight", "alphaLeft) line(posX, posY, posX+width/tileCount, posY+height/tileCount) elif (toggle == 1): strokeWeight(mouseY/20)", "in range(tileCount): for gridX in range(tileCount): posX = int(width/tileCount*gridX) posY", "def draw(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight", "key == \"3\": actStrokeCap = PROJECT elif (key == '4'):", "(key=='s' or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\") if (key=='p' or key=='P'): savePDF =", "actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight savePDF = False actStrokeCap =", "= color(197, 0, 123) colorRight = color(87, 35, 129) alphaLeft", "actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if (key=='s' or key=='S'): saveFrame(datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".png\")", "0 colorLeft = color(197, 0, 123) colorRight = color(87, 35,", "= True if key == \"1\": actStrokeCap = ROUND elif", "toggle = random.randint(0,1) if (toggle == 0): strokeWeight(mouseX/20) stroke(colorLeft, alphaLeft)", "actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight if (key=='s' or 
key=='S'):", "else: colorLeft = color(0) elif (key == '5'): if (colorRight", "savePDF: beginRecord(PDF, datetime.now().strftime(\"%Y%m%d%H%M%S\")+\".pdf\") background(255) smooth() noFill() strokeCap(actStrokeCap) random.seed(actRandomSeed) for gridY", "== \"3\": actStrokeCap = PROJECT elif (key == '4'): if", "= color(273, 73, 51) else: colorRight = color(0) elif (key", "actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight savePDF = False actStrokeCap", "def keyReleased(): global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight", "actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight actRandomSeed = random.randint(0, 100000) def", "= PROJECT elif (key == '4'): if (colorLeft == color(0)):", "posY+width/tileCount, posX+height/tileCount, posY) if (savePDF): savePDF = False endRecord() def" ]
[ "in the dbt project and its dependencies' )) ) sources:", "if package is None: if not pkg_dct: return None else:", "field(metadata=dict( description='A list of the disabled nodes in the target'", "metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} ) _lock:", "this until you're done with building your manifest! \"\"\" self.flat_graph", "# this is the result :( msg = line_wrap_message( f'''\\", "add_macro_patch( self, source_file: SchemaSourceFile, patch: ParsedMacroPatch, ) -> None: #", "cls, candidate: MacroCandidate, specificity: Specificity ) -> 'MaterializationCandidate': return cls(", "_deepcopy(v) for k, v in self.docs.items()}, exposures={k: _deepcopy(v) for k,", "package is not None: def filter(candidate: MacroCandidate) -> bool: return", "resolve_source( self, target_source_name: str, target_table_name: str, current_project: str, node_package: str", "return self.package is None or self.package == model.package_name def search(self,", "ParsedSourceDefinition): _update_into(self.sources, new_source) def build_flat_graph(self): \"\"\"This attribute is used in", "object) -> bool: if not isinstance(other, MaterializationCandidate): return NotImplemented if", "Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) def get_unique_id(self, key, package:", "None or self.package == model.package_name def search(self, haystack: Iterable[N]) ->", "if new_item.original_file_path != existing.original_file_path: raise dbt.exceptions.RuntimeException( f'cannot update a {new_item.resource_type}", "= { 'exposures': { k: v.to_dict(omit_none=False) for k, v in", "str, node_package: str ) -> MaybeParsedSource: key = (target_source_name, target_table_name)", "'on cached file information: {}!' .format(key, name, old_file) ) return", "nodes with their counterpart. Only non-ephemeral refable nodes are examined.", "lambda x: None, 'deserialize': lambda x: None} ) _analysis_lookup: Optional[AnalysisLookup]", "nodes are examined. 
\"\"\" refables = set(NodeType.refable()) merged = set()", "def _materialization_candidates_for( self, project_name: str, materialization_name: str, adapter_type: Optional[str], )", "'Expected to find \"{}\" in cached \"result.{}\" based ' 'on", "ephemeral dependency, and we want them to have a consistent", "are fully namespaced unique_id = f'macro.{patch.package_name}.{patch.name}' macro = self.macros.get(unique_id) if", "n in self.disabled], files={k: _deepcopy(v) for k, v in self.files.items()},", "__post_deserialize__(cls, obj): obj._lock = flags.MP_CONTEXT.Lock() return obj def sync_update_node( self,", "self, project_name: str, materialization_name: str, adapter_type: str ) -> Optional[ParsedMacro]:", "all be added in the __reduce_ex__ method in the #", "by replacing any unselected nodes with their counterpart. Only non-ephemeral", "self.files[key].checksum return my_checksum == source_file.checksum def add_source( self, source_file: SchemaSourceFile,", ") if unique_id is None: # This will usually happen", "V_T: if key not in src: raise CompilationException( 'Expected to", "doc_lookup(self) -> DocLookup: if self._doc_lookup is None: self._doc_lookup = DocLookup(self)", "{} def find_macro_by_name( self, name: str, root_project_name: str, package: Optional[str]", "candidates = _search_packages( current_project, node_package, target_model_package ) for pkg in", "NodeType package_name: str @property def search_name(self) -> str: raise NotImplementedError('search_name", "-> None: \"\"\"Given the selected unique IDs and a writable", "not found in manifest' ) return manifest.sources[unique_id] class RefableLookup(dbtClassMixin): #", "ones, but ignore imported packages. 
- if there is a", "packages filter=filter, ) return candidates.last() def _find_macros_by_name( self, name: str,", "if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def", "(packages {} and ' '{}). dbt cannot resolve this ambiguity'", "profile_hash: FileHash = field(default_factory=FileHash.empty) project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict) @dataclass", "the selected unique IDs and a writable manifest, update this", "same ephemeral dependency, and we want them to have a", "'exposures': { k: v.to_dict(omit_none=False) for k, v in self.exposures.items() },", "sync_update_node( self, new_node: NonSourceCompiledNode ) -> NonSourceCompiledNode: \"\"\"update the node", "be a container of NodeTypes that implements the 'in' operator.", "for model in haystack: if self._matches(model): return model return None", "Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]] MaybeNonSource = Optional[Union[ ManifestNode, Disabled[ManifestNode] ]]", "have a new file ' f'path!' ) dest[unique_id] = new_item", "unique_id in self.exposures: return self.exposures[unique_id] else: # something terrible has", "for doc in manifest.docs.values(): self.add_doc(doc) def perform_lookup( self, unique_id: UniqueID,", "node {} not found in manifest'.format(unique_id) ) @property def doc_lookup(self)", "metadata=dict(description=( 'The nodes defined in the dbt project and its", "str, internal_packages: Set[str] ) -> Locality: if macro.package_name == root_project_name:", "node, so we want to only build it once and", "# in the ProviderContext class. self.flat_graph = {} AnyManifest =", "for the full graph, after parsing and during compilation. 
\"\"\"", "state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck) # Moved from the ParseResult object", "we should want to lock is when compiling an ephemeral", "forward_edges: Dict[str, List[str]] = {n.unique_id: [] for n in nodes}", "dbt.exceptions.InternalException( f'Node {unique_id} found in cache but not found in", "other.locality def __lt__(self, other: object) -> bool: if not isinstance(other,", "self.nodetypes: return False if self.name != model.search_name: return False return", "return None D = TypeVar('D') @dataclass class Disabled(Generic[D]): target: D", "sources={k: _deepcopy(v) for k, v in self.sources.items()}, macros={k: _deepcopy(v) for", ") if filter is None or filter(candidate): candidates.append(candidate) return candidates", "MaybeParsedSource = Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]] MaybeNonSource = Optional[Union[ ManifestNode,", ") send_anonymous_usage_stats: Optional[bool] = field( default=None, metadata=dict(description=( 'Whether dbt is", "self._ref_lookup = RefableLookup(self) return self._ref_lookup def rebuild_ref_lookup(self): self._ref_lookup = RefableLookup(self)", "= ( self.nodes, self.sources, self.macros, self.docs, self.exposures, self.selectors, self.disabled, self.files,", "self.exposures: return self.exposures[unique_id] else: # something terrible has happened raise", "from dbt.contracts.util import ( BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version ) from", "edges. 
\"\"\" backward_edges: Dict[str, List[str]] = {} # pre-populate the", "self.parent_map = backward_edges def build_macro_child_map(self): edge_members = list(chain( self.nodes.values(), self.macros.values(),", "f'unrecognized {new_item.resource_type}: {new_item.unique_id}' ) existing = dest[unique_id] if new_item.original_file_path !=", "in self.storage: self.storage[doc.name] = {} self.storage[doc.name][doc.package_name] = doc.unique_id def populate(self,", "isinstance(other, MacroCandidate): return NotImplemented if self.locality < other.locality: return True", "object, i.e. the Manifest. # The order of the arguments", "if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges) def _deepcopy(value): return", "MutableMapping[str, ManifestNode] = field(default_factory=dict) sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict) macros:", "want to only build it once and avoid any concurrency", "This contains macro methods that are in both the Manifest", "find_disabled_by_name( self, name: str, package: Optional[str] = None ) ->", "given iterable by name.\"\"\" for model in haystack: if self._matches(model):", "issues around it. Make sure you don't call this until", "if self._analysis_lookup is None: self._analysis_lookup = AnalysisLookup(self) return self._analysis_lookup #", "node # it's possible that the node is disabled if", "in cache but not found in manifest' ) return manifest.nodes[unique_id]", "update the manifest and return the existing node. 
\"\"\" with", "self.nodes) self.nodes[node.unique_id] = node def add_node(self, source_file: AnySourceFile, node: ManifestNodes,", "if you want newlines, # this is the result :(", "-> Locality: if macro.package_name == root_project_name: return Locality.Root elif macro.package_name", "macro.package_name == root_project_name: return Locality.Root elif macro.package_name in internal_packages: return", "CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name, ) from dbt.helper_types", "file path. \"\"\" unique_id = new_item.unique_id if unique_id not in", "manifest for the full graph, after parsing and during compilation.", "dbt.node_types import NodeType from dbt.ui import line_wrap_message from dbt import", "# Provide support for copy.deepcopy() - we just need to", "name: str ) -> V_T: if key not in src:", "self.source_patches[key] = patch source_file.source_patches.append(key) def get_used_schemas(self, resource_types=None): return frozenset({ (node.database,", ") def __eq__(self, other: object) -> bool: if not isinstance(other,", "in self._disabled: self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id] = [node] def add_disabled(self, source_file:", "\"\"\" candidates = _search_packages( current_project, node_package, package ) for pkg", "raise dbt.exceptions.InternalException( f'Node {unique_id} found in cache but not found", "dependencies' )) ) selectors: Mapping[UniqueID, Any] = field( metadata=dict(description=( 'The", "children of macros def build_macro_edges(nodes: List[Any]): forward_edges: Dict[str, List[str]] =", "field( metadata=dict(description=( 'The macros defined in the dbt project and", "source_patches because # tuple keys are not supported, so ensure", "UniqueID] will # only ever have exactly one value, but", "in self.files: return False my_checksum = self.files[key].checksum return my_checksum 
==", "x.database for x in chain(self.nodes.values(), self.sources.values()) ) # This is", "dataclass, field from itertools import chain, islice from mashumaro import", "= dest[unique_id] if new_item.original_file_path != existing.original_file_path: raise dbt.exceptions.RuntimeException( f'cannot update", "macro.patch(patch) def add_source_patch( self, source_file: SchemaSourceFile, patch: SourcePatch, ) ->", "False return False M = TypeVar('M', bound=MacroCandidate) class CandidateList(List[M]): def", ") -> ManifestNode: if unique_id not in manifest.nodes: raise dbt.exceptions.InternalException(", "field(default_factory=dict) state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck) # Moved from the ParseResult", "macro.name != name: continue candidate = MacroCandidate( locality=_get_locality(macro, root_project_name, packages),", "if result is not None: return result return None #", "< other.specificity: return True if self.specificity > other.specificity: return False", "unique_id = self.get_unique_id(key, package) if unique_id is not None: return", "self._lock: existing = self.nodes[new_node.unique_id] if getattr(existing, 'compiled', False): # already", "source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self, node: CompileResultNode): if node.unique_id in self._disabled: self._disabled[node.unique_id].append(node)", "not implemented') N = TypeVar('N', bound=Searchable) @dataclass class NameSearcher(Generic[N]): name:", "searcher: NameSearcher = NameSearcher( search_name, package, [NodeType.Source] ) result =", "do not update the manifest and return the existing node.", "bool]] = None ) -> CandidateList: \"\"\"Find macros by their", "typing import ( Dict, List, Optional, Union, Mapping, MutableMapping, Any,", "None: if not pkg_dct: return None else: return next(iter(pkg_dct.values())) elif", "_check_duplicates(source, self.sources) self.sources[source.unique_id] = source # type: ignore 
source_file.sources.append(source.unique_id) def", "the root project \"\"\" filter: Optional[Callable[[MacroCandidate], bool]] = None if", "project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict) @dataclass class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):", "and we want them to have a consistent view of", "{} self.storage[key][source.package_name] = source.unique_id def populate(self, manifest): for source in", "# This will usually happen when a node is disabled", "from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile from dbt.contracts.util import", "return result return None # Called by RunTask.defer_to_manifest def merge_from_artifact(", "The order of the arguments must match the order of", "(sample: {sample})' ) # Methods that were formerly in ParseResult", "if macro.package_name == root_project_name: return Locality.Root elif macro.package_name in internal_packages:", "None for any package. The root project name is used", ") -> MaybeParsedSource: key = (target_source_name, target_table_name) candidates = _search_packages(current_project,", "CandidateList = CandidateList() packages = set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id, macro in", "used in context.common by each node, so we want to", "for k, v in self.nodes.items() }, 'sources': { k: v.to_dict(omit_none=False)", "k: v.to_dict(omit_none=False) for k, v in self.sources.items() } } def", "None: \"\"\"Given the selected unique IDs and a writable manifest,", "identifier for the user', }, ) send_anonymous_usage_stats: Optional[bool] = field(", "formerly in ParseResult def add_macro(self, source_file: SourceFile, macro: ParsedMacro): if", "cache but not found in manifest' ) return manifest.sources[unique_id] class", "items from state (sample: {sample})' ) # Methods that were", "self.__class__, args class MacroManifest(MacroMethods): def __init__(self, macros): self.macros = macros", ") -> 
NonSourceCompiledNode: \"\"\"update the node with a lock. The", "None def add_node(self, node: ManifestNode): if node.resource_type in self._lookup_types: if", "for node in nodes: for unique_id in node.depends_on.macros: if unique_id", "attribute # is added it must all be added in", "def sync_update_node( self, new_node: NonSourceCompiledNode ) -> NonSourceCompiledNode: \"\"\"update the", "merge_from_artifact( self, adapter, other: 'WritableManifest', selected: AbstractSet[UniqueID], ) -> None:", "Optional[ParsedMacro]: candidates: CandidateList = CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name, adapter_type=atype, )", "def _update_into(dest: MutableMapping[str, T], new_item: T): \"\"\"Update dest to overwrite", "ManifestNode): _update_into(self.nodes, new_node) def update_source(self, new_source: ParsedSourceDefinition): _update_into(self.sources, new_source) def", "return the existing node. \"\"\" with self._lock: existing = self.nodes[new_node.unique_id]", "create the initial version of the object and a tuple", "DocLookup(self) @property def source_lookup(self) -> SourceLookup: if self._source_lookup is None:", "selectors: Mapping[UniqueID, Any] = field( metadata=dict(description=( 'The selectors defined in", "_parsing_info: ParsingInfo = field( default_factory=ParsingInfo, metadata={'serialize': lambda x: None, 'deserialize':", "Optional[DocLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda", "ParsedExposure): _update_into(self.exposures, new_exposure) def update_node(self, new_node: ManifestNode): _update_into(self.nodes, new_node) def", ") _analysis_lookup: Optional[AnalysisLookup] = field( default=None, metadata={'serialize': lambda x: None,", "mashumaro import DataClassMessagePackMixin from multiprocessing.synchronize import Lock from typing import", "self.macros, self.docs, self.exposures, self.selectors, self.disabled, 
self.files, self.metadata, self.flat_graph, self.state_check, self.source_patches,", "= node.unique_id def populate(self, manifest): for node in manifest.nodes.values(): self.add_node(node)", "self.name != model.search_name: return False return self.package is None or", "result is not None: return result return None # Called", ") ): merged.add(unique_id) self.nodes[unique_id] = node.replace(deferred=True) # log up to", "lookups instead of 1 # is not a big deal", "in self.docs.items()}, exposures={k: _deepcopy(v) for k, v in self.exposures.items()}, selectors={k:", "is None, any package is allowed. nodetypes should be a", "\"{patch.name}\" ' f'which was not found' ) return if macro.patch_path:", "ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]] MaybeNonSource = Optional[Union[ ManifestNode, Disabled[ManifestNode] ]] T", "found in manifest' ) return manifest.sources[unique_id] class RefableLookup(dbtClassMixin): # model,", "for simplicity forward_edges: Dict[str, List[str]] = {n.unique_id: [] for n", "import ( ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure, HasUniqueID, UnpatchedSourceDefinition,", "you're done with building your manifest! \"\"\" self.flat_graph = {", "to their dependencies', )) child_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping", "= set() for unique_id, node in other.nodes.items(): current = self.nodes.get(unique_id)", "flags from dbt import tracking import dbt.utils NodeEdgeMap = Dict[str,", "unique_id is not None: return self.perform_lookup(unique_id, manifest) return None def", "self.nodes.values(), self.sources.values()) for resource in all_resources: resource_type_plural = resource.resource_type.pluralize() if", "MaterializationCandidate): return NotImplemented if self.specificity < other.specificity: return True if", "This makes output deterministic, which helps for tests. 
\"\"\" return", "= None candidates = _search_packages( current_project, node_package, target_model_package ) for", "macro = self.macros.get(unique_id) if not macro: warn_or_error( f'WARNING: Found documentation", "the model matches the given name, package, and type. If", "f'WARNING: Found documentation for macro \"{patch.name}\" ' f'which was not", "not update the manifest and return the existing node. \"\"\"", "( BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version ) from dbt.dataclass_schema import dbtClassMixin", "if self.locality > other.locality: return False return False @dataclass class", "This is called by 'parse_patch' in the NodePatchParser def add_patch(", "find_unique_id_for_package(storage, key, package: Optional[PackageName]): if key not in storage: return", "is None: specificity = Specificity.Default else: specificity = Specificity.Adapter full_name", "populate(self, manifest): for node in manifest.nodes.values(): self.add_node(node) def perform_lookup( self,", "not macro: warn_or_error( f'WARNING: Found documentation for macro \"{patch.name}\" '", "Mapping[UniqueID, ParsedDocumentation] = field( metadata=dict(description=( 'The docs defined in the", "add_patch( self, source_file: SchemaSourceFile, patch: ParsedNodePatch, ) -> None: if", "f'Doc {unique_id} found in cache but not found in manifest'", "disabled nodes in the target' )) parent_map: Optional[NodeEdgeMap] = field(metadata=dict(", "multiprocessing.synchronize import Lock from typing import ( Dict, List, Optional,", "return find_unique_id_for_package(self.storage, key, package) def find(self, key, package: Optional[PackageName], manifest:", "list(chain( self.nodes.values(), self.sources.values(), self.exposures.values(), )) forward_edges, backward_edges = build_node_edges(edge_members) self.child_map", "unique identifier for the project', }, ) user_id: Optional[UUID] =", "SourcePatch from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile from 
dbt.contracts.util", "because # tuple keys are not supported, so ensure it's", "be overwritten node = self.nodes.get(unique_id) if node: if node.patch_path: package_name,", "build_node_edges(edge_members) self.child_map = forward_edges self.parent_map = backward_edges def build_macro_child_map(self): edge_members", "self._source_lookup = SourceLookup(self) @property def ref_lookup(self) -> RefableLookup: if self._ref_lookup", "_ref_lookup: Optional[RefableLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize':", "__lt__(self, other: object) -> bool: if not isinstance(other, MaterializationCandidate): return", "str, adapter_type: Optional[str], ) -> CandidateList: if adapter_type is None:", "nodetypes should be a container of NodeTypes that implements the", "metadata=dict(description=( 'The selectors defined in selectors.yml' )) ) disabled: Optional[List[CompileResultNode]]", "SourceLookup(self) return self._source_lookup def rebuild_source_lookup(self): self._source_lookup = SourceLookup(self) @property def", "None ) -> Optional[ParsedSourceDefinition]: search_name = f'{source_name}.{table_name}' searcher: NameSearcher =", "self._ref_lookup def rebuild_ref_lookup(self): self._ref_lookup = RefableLookup(self) @property def analysis_lookup(self) ->", "new_node def update_exposure(self, new_exposure: ParsedExposure): _update_into(self.exposures, new_exposure) def update_node(self, new_node:", "source.unique_id def populate(self, manifest): for source in manifest.sources.values(): if hasattr(source,", "-> CandidateList: \"\"\"Find macros by their name. \"\"\" # avoid", "old_file: SourceFile, name: str ) -> V_T: if key not", "and unique_id not in selected and not adapter.get_relation( current.database, current.schema,", "refable nodes are examined. 
\"\"\" refables = set(NodeType.refable()) merged =", "= set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id, macro in self.macros.items(): if macro.name !=", "in the # args tuple in the right position. nodes:", "in src: raise CompilationException( 'Expected to find \"{}\" in cached", "is None: self.send_anonymous_usage_stats = ( not tracking.active_user.do_not_track ) @classmethod def", "f'path!' ) dest[unique_id] = new_item # This contains macro methods", "ParsedDocumentation): _check_duplicates(doc, self.docs) self.docs[doc.unique_id] = doc source_file.docs.append(doc.unique_id) # end of", "for source in manifest.sources.values(): if hasattr(source, 'source_name'): self.add_source(source) def perform_lookup(", "# it's possible that the node is disabled if disabled", "class ManifestStateCheck(dbtClassMixin): vars_hash: FileHash = field(default_factory=FileHash.empty) profile_hash: FileHash = field(default_factory=FileHash.empty)", "patch.name) if key in self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key] = patch", "the manifest.\"\"\" dbt_schema_version: str = field( default_factory=lambda: str(WritableManifest.dbt_schema_version) ) project_id:", "if model.resource_type not in self.nodetypes: return False if self.name !=", "subtract=2 ) raise_compiler_error(msg) self.macros[macro.unique_id] = macro source_file.macros.append(macro.unique_id) def has_file(self, source_file:", "return cls( locality=candidate.locality, macro=candidate.macro, specificity=specificity, ) def __eq__(self, other: object)", "MaybeDocumentation = Optional[ParsedDocumentation] MaybeParsedSource = Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]] MaybeNonSource", "is None: self._ref_lookup = RefableLookup(self) return self._ref_lookup def rebuild_ref_lookup(self): self._ref_lookup", "return the `generate_{component}_name` macro from the 'dbt' internal project \"\"\"", "in 
self.nodetypes: return False if self.name != model.search_name: return False", "def expect(self, unique_id: str) -> GraphMemberNode: if unique_id in self.nodes:", "an error other_path = self.macros[macro.unique_id].original_file_path # subtract 2 for the", "{other_path} ''', subtract=2 ) raise_compiler_error(msg) self.macros[macro.unique_id] = macro source_file.macros.append(macro.unique_id) def", "initial version of the object and a tuple of arguments", "this error, rename or remove one of the following macros:", "dependencies' )) ) exposures: Mapping[UniqueID, ParsedExposure] = field( metadata=dict(description=( 'The", "default=None, metadata=dict(description=( 'Whether dbt is configured to send anonymous usage", "unique key = (patch.overrides, patch.name) if key in self.source_patches: raise_duplicate_source_patch_name(patch,", "storage[key] if package is None: if not pkg_dct: return None", "self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition ): # sources can't be", "k, v in self.sources.items() } } def find_disabled_by_name( self, name:", "resource_types or node.resource_type in resource_types }) def get_used_databases(self): return frozenset(", "but doing 3 dict lookups instead of 1 # is", "ref_lookup(self) -> RefableLookup: if self._ref_lookup is None: self._ref_lookup = RefableLookup(self)", "in pkg_dct: return pkg_dct[package] else: return None class DocLookup(dbtClassMixin): def", "import tracking import dbt.utils NodeEdgeMap = Dict[str, List[str]] PackageName =", "field(default_factory=FileHash.empty) profile_hash: FileHash = field(default_factory=FileHash.empty) project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict)", "return None # Called by RunTask.defer_to_manifest def merge_from_artifact( self, adapter,", "other.specificity and self.locality == other.locality ) if equal: raise_compiler_error( 'Found", "matches the given name, package, and type. 
If package is", "files={k: _deepcopy(v) for k, v in self.files.items()}, state_check=_deepcopy(self.state_check), ) def", "source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict) # following is from ParseResult", "None: self._analysis_lookup = AnalysisLookup(self) return self._analysis_lookup # Called by dbt.parser.manifest._resolve_refs_for_exposure", "macro: warn_or_error( f'WARNING: Found documentation for macro \"{patch.name}\" ' f'which", "Optional[ParsedMacro]: \"\"\" The `generate_X_name` macros are similar to regular ones,", "== other.locality ) if equal: raise_compiler_error( 'Found two materializations with", "be just-in-time compiling the same ephemeral dependency, and we want", "not found in manifest' ) return manifest.nodes[unique_id] class AnalysisLookup(RefableLookup): _lookup_types:", "was not found' ) return if macro.patch_path: package_name, existing_file_path =", "k, v in self.macros.items()}, docs={k: _deepcopy(v) for k, v in", "self.exposures[unique_id] else: # something terrible has happened raise dbt.exceptions.InternalException( 'Expected", ")) metadata: ManifestMetadata = field(metadata=dict( description='Metadata about the manifest', ))", "nodes to their dependents', )) metadata: ManifestMetadata = field(metadata=dict( description='Metadata", ") from dbt.dataclass_schema import dbtClassMixin from dbt.exceptions import ( CompilationException,", "== node_package: return [current_project, None] else: return [current_project, node_package, None]", "package: Optional[PackageName]): if key not in storage: return None pkg_dct:", "way. def __init__(self): self.macros = [] self.metadata = {} def", "need to avoid the lock! 
# pickle and deepcopy use", "child nodes to their dependencies', )) child_map: Optional[NodeEdgeMap] = field(metadata=dict(", "logger.debug( f'Merged {len(merged)} items from state (sample: {sample})' ) #", "self.docs, self.exposures, self.selectors, self.disabled, self.files, self.metadata, self.flat_graph, self.state_check, self.source_patches, self._disabled,", "in self.sources.items()}, macros={k: _deepcopy(v) for k, v in self.macros.items()}, docs={k:", "CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode ) from dbt.contracts.graph.parsed import ( ParsedMacro,", "pkg in candidates: result = self.doc_lookup.find(name, pkg, self) if result", "x: None, 'deserialize': lambda x: None} ) _source_lookup: Optional[SourceLookup] =", "for node in chain(self.nodes.values(), self.sources.values()) if not resource_types or node.resource_type", "and its dependencies' )) ) sources: Mapping[UniqueID, ParsedSourceDefinition] = field(", "if macro.name != name: continue candidate = MacroCandidate( locality=_get_locality(macro, root_project_name,", "field(metadata=dict( description='A mapping from parent nodes to their dependents', ))", "compiled, do not update the manifest and return the existing", "macros: Mapping[UniqueID, ParsedMacro] = field( metadata=dict(description=( 'The macros defined in", "Union[Manifest, MacroManifest] @dataclass @schema_version('manifest', 2) class WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID, ManifestNode]", "if unique_id not in manifest.docs: raise dbt.exceptions.InternalException( f'Doc {unique_id} found", "{k: sorted(v) for k, v in dct.items()} def build_node_edges(nodes: List[ManifestNode]):", ") def __pre_serialize__(self): # serialization won't work with anything except", "= f'{source_name}.{table_name}' searcher: NameSearcher = NameSearcher( search_name, package, [NodeType.Source] )", "other.specificity: return True if self.specificity > other.specificity: return False if", "self.macros.items()}, docs={k: 
_deepcopy(v) for k, v in self.docs.items()}, exposures={k: _deepcopy(v)", "import GLOBAL_LOGGER as logger from dbt.node_types import NodeType from dbt.ui", "\"\"\"Metadata for the manifest.\"\"\" dbt_schema_version: str = field( default_factory=lambda: str(WritableManifest.dbt_schema_version)", "getattr(existing, 'compiled', False): # already compiled -> must be a", "in the graph by its name and package name, or", "return self[-1].macro def _get_locality( macro: ParsedMacro, root_project_name: str, internal_packages: Set[str]", "not in dest: raise dbt.exceptions.RuntimeException( f'got an update_{new_item.resource_type} call with", "None} ) _ref_lookup: Optional[RefableLookup] = field( default=None, metadata={'serialize': lambda x:", "ManifestNodes): # nodes can't be overwritten! _check_duplicates(node, self.nodes) self.nodes[node.unique_id] =", "None: if patch.yaml_key in ['models', 'seeds', 'snapshots']: unique_id = self.ref_lookup.get_unique_id(patch.name,", "= None if package is not None: def filter(candidate: MacroCandidate)", "if unique_id not in manifest.sources: raise dbt.exceptions.InternalException( f'Source {unique_id} found", "Locality.Imported class Searchable(Protocol): resource_type: NodeType package_name: str @property def search_name(self)", "'Found two materializations with the name {} (packages {} and", "from multiprocessing.synchronize import Lock from typing import ( Dict, List,", "new_source) def build_flat_graph(self): \"\"\"This attribute is used in context.common by", "self.build_parent_and_child_maps() return WritableManifest( nodes=self.nodes, sources=self.sources, macros=self.macros, docs=self.docs, exposures=self.exposures, selectors=self.selectors, metadata=self.metadata,", "manifest.docs[unique_id] class SourceLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[Tuple[str, str],", "= field(default_factory=dict) macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict) docs: 
MutableMapping[str, ParsedDocumentation]", "obj): obj._lock = flags.MP_CONTEXT.Lock() return obj def sync_update_node( self, new_node:", "NotImplemented if self.locality < other.locality: return True if self.locality >", "root_project_name: str ) -> Optional[ParsedMacro]: \"\"\" The `generate_X_name` macros are", "x: None} ) _ref_lookup: Optional[RefableLookup] = field( default=None, metadata={'serialize': lambda", "CandidateList( MaterializationCandidate.from_macro(m, specificity) for m in self._find_macros_by_name(full_name, project_name) ) def", "def resolve_ref( self, target_model_name: str, target_model_package: Optional[str], current_project: str, node_package:", ")) ) docs: Mapping[UniqueID, ParsedDocumentation] = field( metadata=dict(description=( 'The docs", "parent nodes to their dependents', )) metadata: ManifestMetadata = field(metadata=dict(", "new_node: ManifestNode): _update_into(self.nodes, new_node) def update_source(self, new_source: ParsedSourceDefinition): _update_into(self.sources, new_source)", "= source_file.file_id if key is None: return False if key", "is disabled return # patches can't be overwritten node =", "int = 0 static_analysis_path_count: int = 0 @dataclass class ManifestStateCheck(dbtClassMixin):", "is not compiled, update it with the new node and", "CandidateList = CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name, adapter_type=atype, ) for atype", "and by keyword. 
If an attribute # is added it", "SchemaSourceFile, patch: SourcePatch, ) -> None: # source patches must", "not in resource_fqns: resource_fqns[resource_type_plural] = set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns #", "filter is None or filter(candidate): candidates.append(candidate) return candidates @dataclass class", "unique IDs and a writable manifest, update this manifest by", "compiling an ephemeral ancestor of a node at runtime, because", "perform_lookup( self, unique_id: UniqueID, manifest ) -> ManifestNode: if unique_id", "CandidateList: if adapter_type is None: specificity = Specificity.Default else: specificity", "= TypeVar('T', bound=GraphMemberNode) def _update_into(dest: MutableMapping[str, T], new_item: T): \"\"\"Update", "the new node and return that. If the existing node", "Optional[str] = None ) -> Optional[ManifestNode]: searcher: NameSearcher = NameSearcher(", "existing_file_path = node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch) def add_macro_patch(", "can't be overwritten node = self.nodes.get(unique_id) if node: if node.patch_path:", "= {} AnyManifest = Union[Manifest, MacroManifest] @dataclass @schema_version('manifest', 2) class", "self @classmethod def __post_deserialize__(cls, obj): obj._lock = flags.MP_CONTEXT.Lock() return obj", "Protocol from uuid import UUID from dbt.contracts.graph.compiled import ( CompileResultNode,", "'analyses': unique_id = self.analysis_lookup.get_unique_id(patch.name, None) else: raise dbt.exceptions.InternalException( f'Unexpected yaml_key", "def add_doc(self, doc: ParsedDocumentation): if doc.name not in self.storage: self.storage[doc.name]", "True if self.locality > other.locality: return False return False M", "the NodePatchParser def add_patch( self, source_file: SchemaSourceFile, patch: ParsedNodePatch, )", "None, 'deserialize': lambda x: None} ) _ref_lookup: 
Optional[RefableLookup] = field(", "pkg_dct: Mapping[PackageName, UniqueID] = storage[key] if package is None: if", "# and dbt.parser.manifest._process_source_for_node def resolve_source( self, target_source_name: str, target_table_name: str,", "replacing any unselected nodes with their counterpart. Only non-ephemeral refable", "= field( metadata=dict(description=( 'The docs defined in the dbt project", "and not node.is_ephemeral and unique_id not in selected and not", "package == candidate.macro.package_name candidates: CandidateList = self._find_macros_by_name( name=name, root_project_name=root_project_name, filter=filter,", "@classmethod def __post_deserialize__(cls, obj): obj._lock = flags.MP_CONTEXT.Lock() return obj def", "the adapter'), ) def __post_init__(self): if tracking.active_user is None: return", "-> NonSourceCompiledNode: \"\"\"update the node with a lock. The only", "not a big deal at all and retains consistency def", "result = searcher.search(self.disabled) if result is not None: assert isinstance(result,", "existing_file_path = macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name] = unique_id macro.patch(patch) def", "Mapping[str, HasUniqueID] ): if value.unique_id in src: raise_duplicate_resource_name(value, src[value.unique_id]) K_T", "serialization won't work with anything except an empty source_patches because", "None: # source patches must be unique key = (patch.overrides,", "_deepcopy(v) for k, v in self.nodes.items()}, sources={k: _deepcopy(v) for k,", "this manifest by replacing any unselected nodes with their counterpart.", "resolve_doc( self, name: str, package: Optional[str], current_project: str, node_package: str,", "the node is disabled if disabled is None: disabled =", "exposures: Mapping[UniqueID, ParsedExposure] = field( metadata=dict(description=( 'The exposures defined in", "not in src: raise CompilationException( 'Expected to find \"{}\" in", "# Called 
by DocsRuntimeContext.doc def resolve_doc( self, name: str, package:", "self.sources.items()}, macros={k: _deepcopy(v) for k, v in self.macros.items()}, docs={k: _deepcopy(v)", "f'cannot update a {new_item.resource_type} to have a new file '", "v in dct.items()} def build_node_edges(nodes: List[ManifestNode]): \"\"\"Build the forward and", "-> Optional[ParsedMacro]: if not self: return None self.sort() return self[-1].macro", "List[NodeType] def _matches(self, model: N) -> bool: \"\"\"Return True if", "added it must all be added in the __reduce_ex__ method", "runtime, because multiple threads could be just-in-time compiling the same", "subtract 2 for the \"Compilation Error\" indent # note that", "None: return Disabled(disabled) return None # Called by DocsRuntimeContext.doc def", "itertools import chain, islice from mashumaro import DataClassMessagePackMixin from multiprocessing.synchronize", "be an existing value to overwrite, and they two nodes", "def __init__(self, macros): self.macros = macros self.metadata = ManifestMetadata() #", "already compiled -> must be a NonSourceCompiledNode return cast(NonSourceCompiledNode, existing)", "manifest.nodes.values(): self.add_node(node) def perform_lookup( self, unique_id: UniqueID, manifest ) ->", "source_file: AnySourceFile, node: CompileResultNode, test_from=None): self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile): assert", "self.macros: # detect that the macro exists and emit an", "manifest) return None def add_node(self, node: ManifestNode): if node.resource_type in", "(adapter_type, None) )) return candidates.last() def get_resource_fqns(self) -> Mapping[str, PathSet]:", "an ' f'unrecognized {new_item.resource_type}: {new_item.unique_id}' ) existing = dest[unique_id] if", "= {} self.storage[node.name][node.package_name] = node.unique_id def populate(self, manifest): for node", "def find_generate_macro_by_name( self, component: str, root_project_name: str ) -> Optional[ParsedMacro]:", "-> 
ParsedDocumentation: if unique_id not in manifest.docs: raise dbt.exceptions.InternalException( f'Doc", "UniqueID] = storage[key] if package is None: if not pkg_dct:", "ParsedNodes and return them as two separate dictionaries, each mapping", "def build_macro_child_map(self): edge_members = list(chain( self.nodes.values(), self.macros.values(), )) forward_edges =", "to find \"{}\" in cached \"result.{}\" based ' 'on cached", "class SourceLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[Tuple[str, str], Dict[PackageName,", "= searcher.search(self.disabled) return result def find_disabled_source_by_name( self, source_name: str, table_name:", "None: return [target_package] elif current_project == node_package: return [current_project, None]", "writable manifest, update this manifest by replacing any unselected nodes", "self.nodes[new_node.unique_id] if getattr(existing, 'compiled', False): # already compiled -> must", "if unique_id not in manifest.nodes: raise dbt.exceptions.InternalException( f'Node {unique_id} found", "Optional[bool] = field( default=None, metadata=dict(description=( 'Whether dbt is configured to", "NameSearcher = NameSearcher( name, package, NodeType.refable() ) result = searcher.search(self.disabled)", "str, target_table_name: str, current_project: str, node_package: str ) -> MaybeParsedSource:", "add_doc(self, source_file: SourceFile, doc: ParsedDocumentation): _check_duplicates(doc, self.docs) self.docs[doc.unique_id] = doc", "is None or self.package == model.package_name def search(self, haystack: Iterable[N])", "src: raise CompilationException( 'Expected to find \"{}\" in cached \"result.{}\"", "update_{new_item.resource_type} call with an ' f'unrecognized {new_item.resource_type}: {new_item.unique_id}' ) existing", "is None: disabled = self.find_disabled_source_by_name( target_source_name, target_table_name, pkg ) if", "new_node) return new_node def update_exposure(self, new_exposure: ParsedExposure): 
_update_into(self.exposures, new_exposure) def", "str, package: Optional[str] = None ) -> Optional[ParsedSourceDefinition]: search_name =", "( self.nodes, self.sources, self.macros, self.docs, self.exposures, self.selectors, self.disabled, self.files, self.metadata,", "must all be added in the __reduce_ex__ method in the", "unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges) def _deepcopy(value): return value.from_dict(value.to_dict(omit_none=True))", "# This is used in dbt.task.rpc.sql_commands 'add_new_refs' def deepcopy(self): return", "of a node at runtime, because multiple threads could be", "MutableMapping[str, T], new_item: T): \"\"\"Update dest to overwrite whatever is", "selectors defined in selectors.yml' )) ) disabled: Optional[List[CompileResultNode]] = field(metadata=dict(", "a node is disabled return # patches can't be overwritten", ")), ) adapter_type: Optional[str] = field( default=None, metadata=dict(description='The type name", "self.sources[source.unique_id] = source # type: ignore source_file.sources.append(source.unique_id) def add_node_nofile(self, node:", "@property def source_lookup(self) -> SourceLookup: if self._source_lookup is None: self._source_lookup", "new_exposure: ParsedExposure): _update_into(self.exposures, new_exposure) def update_node(self, new_node: ManifestNode): _update_into(self.nodes, new_node)", "source_file: SchemaSourceFile, source: UnpatchedSourceDefinition ): # sources can't be overwritten!", "metadata=dict(description=( 'The macros defined in the dbt project and its", "field(default_factory=dict) metadata: ManifestMetadata = field(default_factory=ManifestMetadata) flat_graph: Dict[str, Any] = field(default_factory=dict)", "existing = self.nodes[new_node.unique_id] if getattr(existing, 'compiled', False): # already compiled", "x: None, 'deserialize': lambda x: None} ) _lock: Lock =", "class Searchable(Protocol): resource_type: NodeType package_name: str 
@property def search_name(self) ->", "2 Root = 3 class Specificity(enum.IntEnum): Default = 1 Adapter", "the 'graph' context property # in the ProviderContext class. self.flat_graph", "self._doc_lookup, self._source_lookup, self._ref_lookup, ) return self.__class__, args class MacroManifest(MacroMethods): def", "\"\"\" backward_edges: Dict[str, List[str]] = {} # pre-populate the forward", "is used in context.common by each node, so we want", "add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None): self.add_node_nofile(node) if isinstance(source_file, SchemaSourceFile):", "dependents', )) metadata: ManifestMetadata = field(metadata=dict( description='Metadata about the manifest',", "None: return False if key not in self.files: return False", "2 for the \"Compilation Error\" indent # note that the", "pkg_dct[package] else: return None class DocLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'):", "= field(default_factory=FileHash.empty) profile_hash: FileHash = field(default_factory=FileHash.empty) project_hashes: MutableMapping[str, FileHash] =", "= field(default_factory=ManifestStateCheck) # Moved from the ParseResult object source_patches: MutableMapping[SourceKey,", "their dependents', )) metadata: ManifestMetadata = field(metadata=dict( description='Metadata about the", "and ( node.resource_type in refables and not node.is_ephemeral and unique_id", "project_id: Optional[str] = field( default=None, metadata={ 'description': 'A unique identifier", "\"\"\" return {k: sorted(v) for k, v in dct.items()} def", "something terrible has happened raise dbt.exceptions.InternalException( 'Expected node {} not", "pkg in candidates: source = self.source_lookup.find(key, pkg, self) if source", "node.unique_id def populate(self, manifest): for node in manifest.nodes.values(): self.add_node(node) def", "supported, so ensure it's empty self.source_patches = {} return self", "in context.common by each node, so we want to only", ") def 
build_parent_and_child_maps(self): edge_members = list(chain( self.nodes.values(), self.sources.values(), self.exposures.values(), ))", "specificity = Specificity.Adapter full_name = dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, )", "._include_in_cost def expect(self, unique_id: str) -> GraphMemberNode: if unique_id in", "self, project_name: str, materialization_name: str, adapter_type: Optional[str], ) -> CandidateList:", "is None: if not pkg_dct: return None else: return next(iter(pkg_dct.values()))", "the given name, package, and type. If package is None,", "context.common by each node, so we want to only build", "locality=candidate.locality, macro=candidate.macro, specificity=specificity, ) def __eq__(self, other: object) -> bool:", "by name.\"\"\" for model in haystack: if self._matches(model): return model", "sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict) macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict)", "in self.sources.items() } } def find_disabled_by_name( self, name: str, package:", "class declaration, because they are used as # positional arguments", "# pre-populate the forward edge dict for simplicity forward_edges: Dict[str,", ") return if macro.patch_path: package_name, existing_file_path = macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path)", "all_resources: resource_type_plural = resource.resource_type.pluralize() if resource_type_plural not in resource_fqns: resource_fqns[resource_type_plural]", "= {} all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values()) for resource in", "self.source_patches = {} return self @classmethod def __post_deserialize__(cls, obj): obj._lock", "nodes if n.unique_id.startswith('macro') or n.depends_on.macros } for node in nodes:", "self.source_patches[key]) self.source_patches[key] = patch 
source_file.source_patches.append(key) def get_used_schemas(self, resource_types=None): return frozenset({", "Dict[PackageName, UniqueID] will # only ever have exactly one value,", "\"\"\"Given a dictionary, sort each value. This makes output deterministic,", "arguments to construct a Manifest. def __reduce_ex__(self, protocol): args =", "forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges) def _deepcopy(value): return value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum): Core", "@property def ref_lookup(self) -> RefableLookup: if self._ref_lookup is None: self._ref_lookup", "filter(candidate: MacroCandidate) -> bool: return candidate.locality != Locality.Imported candidates: CandidateList", "ParseResult # Provide support for copy.deepcopy() - we just need", "if filter is None or filter(candidate): candidates.append(candidate) return candidates @dataclass", "= str def find_unique_id_for_package(storage, key, package: Optional[PackageName]): if key not", "that are in both the Manifest # and the MacroManifest", "cycle from dbt.adapters.factory import get_adapter_package_names candidates: CandidateList = CandidateList() packages", "__pre_serialize__(self): # serialization won't work with anything except an empty", "node.depends_on.macros: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges) def _deepcopy(value):", "= 1 Adapter = 2 @dataclass class MacroCandidate: locality: Locality", "def default(cls): return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def _sort_values(dct): \"\"\"Given a", "return result def _materialization_candidates_for( self, project_name: str, materialization_name: str, adapter_type:", "Just to make mypy happy. 
There must be a better", "if unique_id is not None: return self.perform_lookup(unique_id, manifest) return None", "node: Optional[ManifestNode] = None disabled: Optional[ManifestNode] = None candidates =", "# Called by dbt.parser.manifest._resolve_refs_for_exposure # and dbt.parser.manifest._process_refs_for_node def resolve_ref( self,", "dbt.utils NodeEdgeMap = Dict[str, List[str]] PackageName = str DocName =", "is used to determine priority: - locally defined macros come", "default=None, metadata={ 'description': 'A unique identifier for the user', },", "atype in (adapter_type, None) )) return candidates.last() def get_resource_fqns(self) ->", "= self.find_disabled_by_name( target_model_name, pkg ) if disabled is not None:", "= field( default=None, metadata={ 'description': 'A unique identifier for the", "forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges) def _deepcopy(value): return value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum):", "self.analysis_lookup.get_unique_id(patch.name, None) else: raise dbt.exceptions.InternalException( f'Unexpected yaml_key {patch.yaml_key} for patch", "def add_source_patch( self, source_file: SchemaSourceFile, patch: SourcePatch, ) -> None:", "None disabled: Optional[ManifestNode] = None candidates = _search_packages( current_project, node_package,", "Manifest. 
def __reduce_ex__(self, protocol): args = ( self.nodes, self.sources, self.macros,", "AnySourceFile, node: CompileResultNode, test_from=None): self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from", "def __lt__(self, other: object) -> bool: if not isinstance(other, MacroCandidate):", "class RefableLookup(dbtClassMixin): # model, seed, snapshot _lookup_types: ClassVar[set] = set(NodeType.refable())", "project_name) ) def find_materialization_macro_by_name( self, project_name: str, materialization_name: str, adapter_type:", "raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name, ) from dbt.helper_types import PathSet from dbt.logger", "Dict[str, List[str]] = {n.unique_id: [] for n in nodes} for", "else: return next(iter(pkg_dct.values())) elif package in pkg_dct: return pkg_dct[package] else:", "and node.config.enabled: return node # it's possible that the node", "multiple threads could be just-in-time compiling the same ephemeral dependency,", "'Whether dbt is configured to send anonymous usage statistics' )),", ") dest[unique_id] = new_item # This contains macro methods that", "= macros self.metadata = ManifestMetadata() # This is returned by", "None def add_source(self, source: ParsedSourceDefinition): key = (source.source_name, source.name) if", "candidates: source = self.source_lookup.find(key, pkg, self) if source is not", "# sources can't be overwritten! _check_duplicates(source, self.sources) self.sources[source.unique_id] = source", "if unique_id in self.nodes: return self.nodes[unique_id] elif unique_id in self.sources:", "SourceFile) -> bool: key = source_file.file_id if key is None:", "= field(default_factory=dict) sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict) macros: MutableMapping[str, ParsedMacro]", "args tuple in the right position. 
nodes: MutableMapping[str, ManifestNode] =", "v in self.exposures.items() }, 'nodes': { k: v.to_dict(omit_none=False) for k,", "Called by RunTask.defer_to_manifest def merge_from_artifact( self, adapter, other: 'WritableManifest', selected:", "the target' )) parent_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from", "= (source.source_name, source.name) if key not in self.storage: self.storage[key] =", "internal_packages: Set[str] ) -> Locality: if macro.package_name == root_project_name: return", "a macro in the graph by its name and package", "view of the manifest. If the existing node is not", "= NameSearcher( name, package, NodeType.refable() ) result = searcher.search(self.disabled) return", "patches must be unique key = (patch.overrides, patch.name) if key", "vars_hash: FileHash = field(default_factory=FileHash.empty) profile_hash: FileHash = field(default_factory=FileHash.empty) project_hashes: MutableMapping[str,", "source_file: SourceFile, macro: ParsedMacro): if macro.unique_id in self.macros: # detect", "the dbt project and its dependencies' )) ) selectors: Mapping[UniqueID,", "list of ParsedNodes and return them as two separate dictionaries,", "were formerly in ParseResult def add_macro(self, source_file: SourceFile, macro: ParsedMacro):", "root_project_name: return Locality.Root elif macro.package_name in internal_packages: return Locality.Core else:", "= macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name] = unique_id macro.patch(patch) def add_source_patch(", "for k, v in self.macros.items()}, docs={k: _deepcopy(v) for k, v", "None: return Disabled(disabled) return None # Called by dbt.parser.manifest._resolve_sources_for_exposure #", "exposures defined in the dbt project and its dependencies' ))", "manifest.nodes: raise dbt.exceptions.InternalException( f'Node {unique_id} found in cache but not", "# in the Manifest class declaration, because they 
are used", "_deepcopy(v) for k, v in self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n) for n", "dbt.exceptions.InternalException( f'Source {unique_id} found in cache but not found in", ") from typing_extensions import Protocol from uuid import UUID from", "macro: ParsedMacro, root_project_name: str, internal_packages: Set[str] ) -> Locality: if", "x: None} ) _analysis_lookup: Optional[AnalysisLookup] = field( default=None, metadata={'serialize': lambda", ") -> CandidateList: if adapter_type is None: specificity = Specificity.Default", "# detect that the macro exists and emit an error", "add_node_nofile(self, node: ManifestNodes): # nodes can't be overwritten! _check_duplicates(node, self.nodes)", "raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name, ) from dbt.helper_types import PathSet", "= self.macros[macro.unique_id].original_file_path # subtract 2 for the \"Compilation Error\" indent", "candidates: result = self.doc_lookup.find(name, pkg, self) if result is not", "= 0 @dataclass class ManifestStateCheck(dbtClassMixin): vars_hash: FileHash = field(default_factory=FileHash.empty) profile_hash:", "= {} return self @classmethod def __post_deserialize__(cls, obj): obj._lock =", "new_exposure) def update_node(self, new_node: ManifestNode): _update_into(self.nodes, new_node) def update_source(self, new_source:", "Specificity.Adapter full_name = dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, ) return CandidateList(", "metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} ) _source_lookup:", "__init__(self): self.macros = [] self.metadata = {} def find_macro_by_name( self,", "equal: raise_compiler_error( 'Found two materializations with the name {} (packages", "lambda x: None, 'deserialize': lambda x: None} ) _ref_lookup: Optional[RefableLookup]", "= 
chain(self.exposures.values(), self.nodes.values(), self.sources.values()) for resource in all_resources: resource_type_plural =", "i.e. the Manifest. # The order of the arguments must", "with a lock. The only time we should want to", "raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key] = patch source_file.source_patches.append(key) def get_used_schemas(self, resource_types=None): return", "self.storage[key][source.package_name] = source.unique_id def populate(self, manifest): for source in manifest.sources.values():", "dbt.exceptions.InternalException( f'Unexpected yaml_key {patch.yaml_key} for patch in ' f'file {source_file.path.original_file_path}'", "sorted(v) for k, v in dct.items()} def build_node_edges(nodes: List[ManifestNode]): \"\"\"Build", "node in manifest.nodes.values(): self.add_node(node) def perform_lookup( self, unique_id: UniqueID, manifest", "the 'dbt' internal project \"\"\" def filter(candidate: MacroCandidate) -> bool:", "obj def sync_update_node( self, new_node: NonSourceCompiledNode ) -> NonSourceCompiledNode: \"\"\"update", "def get_unique_id(self, key, package: Optional[PackageName]): return find_unique_id_for_package(self.storage, key, package) def", "Locality(enum.IntEnum): Core = 1 Imported = 2 Root = 3", "given list of ParsedNodes and return them as two separate", "to lock is when compiling an ephemeral ancestor of a", "to their dependents', )) metadata: ManifestMetadata = field(metadata=dict( description='Metadata about", "dictionaries, each mapping unique IDs to lists of edges. 
\"\"\"", ") macros: Mapping[UniqueID, ParsedMacro] = field( metadata=dict(description=( 'The macros defined", "haystack: Iterable[N]) -> Optional[N]: \"\"\"Find an entry in the given", "If the existing node is not compiled, update it with", "{} self.storage[doc.name][doc.package_name] = doc.unique_id def populate(self, manifest): for doc in", "macro=candidate.macro, specificity=specificity, ) def __eq__(self, other: object) -> bool: if", "bool]] = None if package is not None: def filter(candidate:", "adapter, other: 'WritableManifest', selected: AbstractSet[UniqueID], ) -> None: \"\"\"Given the", "'snapshots']: unique_id = self.ref_lookup.get_unique_id(patch.name, None) elif patch.yaml_key == 'analyses': unique_id", "target_package is not None: return [target_package] elif current_project == node_package:", "test_from=None): self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else:", "ProviderContext class. self.flat_graph = {} AnyManifest = Union[Manifest, MacroManifest] @dataclass", "def build_parent_and_child_maps(self): edge_members = list(chain( self.nodes.values(), self.sources.values(), self.exposures.values(), )) forward_edges,", "result def _materialization_candidates_for( self, project_name: str, materialization_name: str, adapter_type: Optional[str],", "}, ) user_id: Optional[UUID] = field( default=None, metadata={ 'description': 'A", "return None pkg_dct: Mapping[PackageName, UniqueID] = storage[key] if package is", "'{}). 
dbt cannot resolve this ambiguity' .format(self.macro.name, self.macro.package_name, other.macro.package_name) )", "lock is when compiling an ephemeral ancestor of a node", "from dbt.dataclass_schema import dbtClassMixin from dbt.exceptions import ( CompilationException, raise_duplicate_resource_name,", "want newlines, # this is the result :( msg =", "manifest: 'Manifest'): self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {} self.populate(manifest)", "a {new_item.resource_type} to have a new file ' f'path!' )", "won't work with anything except an empty source_patches because #", "node def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None): self.add_node_nofile(node) if", "ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure, HasUniqueID, UnpatchedSourceDefinition, ManifestNodes ) from", "in manifest' ) return manifest.docs[unique_id] class SourceLookup(dbtClassMixin): def __init__(self, manifest:", "only ever have exactly one value, but doing 3 dict", "my_checksum == source_file.checksum def add_source( self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition", "UnpatchedSourceDefinition, ManifestNodes ) from dbt.contracts.graph.unparsed import SourcePatch from dbt.contracts.files import", "UniqueID = str def find_unique_id_for_package(storage, key, package: Optional[PackageName]): if key", "SourceLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]]", "= {} self.populate(manifest) def get_unique_id(self, key, package: Optional[PackageName]): return find_unique_id_for_package(self.storage,", "_search_packages( current_project, node_package, package ) for pkg in candidates: result", "ever have exactly one value, but doing 3 dict lookups", "self.storage[node.name][node.package_name] = node.unique_id def populate(self, manifest): for node in manifest.nodes.values():", "assert test_from 
source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_exposure(self, source_file: SchemaSourceFile,", "be a better way. def __init__(self): self.macros = [] self.metadata", "[] for n in nodes if n.unique_id.startswith('macro') or n.depends_on.macros }", "import flags from dbt import tracking import dbt.utils NodeEdgeMap =", "NodePatchParser def add_patch( self, source_file: SchemaSourceFile, patch: ParsedNodePatch, ) ->", "add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None): self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile):", "backward edges on the given list of ParsedNodes and return", "in manifest.docs.values(): self.add_doc(doc) def perform_lookup( self, unique_id: UniqueID, manifest )", "def update_source(self, new_source: ParsedSourceDefinition): _update_into(self.sources, new_source) def build_flat_graph(self): \"\"\"This attribute", "NotImplemented return self.locality == other.locality def __lt__(self, other: object) ->", "bool: if not isinstance(other, MaterializationCandidate): return NotImplemented equal = (", "is the result :( msg = line_wrap_message( f'''\\ dbt found", "test_from=None): self.add_node_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else:", "and # dbt.graph.queue.get and ._include_in_cost def expect(self, unique_id: str) ->", "pickle and deepcopy use this. 
It returns a callable object", "return source if disabled is None: disabled = self.find_disabled_source_by_name( target_source_name,", "write(self, path): self.writable_manifest().write(path) # Called in dbt.compilation.Linker.write_graph and # dbt.graph.queue.get", "= field( default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x:", "[] self.metadata = {} def find_macro_by_name( self, name: str, root_project_name:", "methods that are in both the Manifest # and the", "AnalysisLookup: if self._analysis_lookup is None: self._analysis_lookup = AnalysisLookup(self) return self._analysis_lookup", "tuple in the right position. nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict)", "of edges. \"\"\" backward_edges: Dict[str, List[str]] = {} # pre-populate", "is None: self._doc_lookup = DocLookup(self) return self._doc_lookup def rebuild_doc_lookup(self): self._doc_lookup", "= None ) -> Optional[ParsedSourceDefinition]: search_name = f'{source_name}.{table_name}' searcher: NameSearcher", "will # only ever have exactly one value, but doing", "one value, but doing 3 dict lookups instead of 1", "= RefableLookup(self) @property def analysis_lookup(self) -> AnalysisLookup: if self._analysis_lookup is", "_lock: Lock = field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda x: None, 'deserialize':", "they two nodes must have the same original file path.", "adapter.get_relation( current.database, current.schema, current.identifier ) ): merged.add(unique_id) self.nodes[unique_id] = node.replace(deferred=True)", "test_from) else: source_file.nodes.append(node.unique_id) def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation): _check_duplicates(doc,", "target_model_name, pkg ) if disabled is not None: return Disabled(disabled)", "metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} ) _ref_lookup:", ") raise_compiler_error(msg) self.macros[macro.unique_id] = macro 
source_file.macros.append(macro.unique_id) def has_file(self, source_file: SourceFile)", "is None or filter(candidate): candidates.append(candidate) return candidates @dataclass class ParsingInfo:", "metadata=dict(description='The type name of the adapter'), ) def __post_init__(self): if", "make mypy happy. There must be a better way. def", "yaml_key {patch.yaml_key} for patch in ' f'file {source_file.path.original_file_path}' ) if", "continue candidate = MacroCandidate( locality=_get_locality(macro, root_project_name, packages), macro=macro, ) if", "self.disabled, self.files, self.metadata, self.flat_graph, self.state_check, self.source_patches, self._disabled, self._doc_lookup, self._source_lookup, self._ref_lookup,", "_doc_lookup: Optional[DocLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize':", "isinstance(other, MaterializationCandidate): return NotImplemented equal = ( self.specificity == other.specificity", "These attributes are both positional and by keyword. If an", "Callable, Iterable, Generic, cast, AbstractSet, ClassVar ) from typing_extensions import", "-> RefableLookup: if self._ref_lookup is None: self._ref_lookup = RefableLookup(self) return", "-> None: # source patches must be unique key =", "node.depends_on_nodes: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges), _sort_values(backward_edges) #", "self, target_source_name: str, target_table_name: str, current_project: str, node_package: str )", "def __lt__(self, other: object) -> bool: if not isinstance(other, MaterializationCandidate):", "found in manifest' ) return manifest.nodes[unique_id] class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set]", "node.name not in self.storage: self.storage[node.name] = {} self.storage[node.name][node.package_name] = node.unique_id", "tuple of arguments # for the object, i.e. 
the Manifest.", "'WritableManifest', selected: AbstractSet[UniqueID], ) -> None: \"\"\"Given the selected unique", "dbt project and its dependencies' )) ) sources: Mapping[UniqueID, ParsedSourceDefinition]", "else: return Locality.Imported class Searchable(Protocol): resource_type: NodeType package_name: str @property", "node in nodes: for unique_id in node.depends_on.macros: if unique_id in", "Make sure you don't call this until you're done with", "( self.specificity == other.specificity and self.locality == other.locality ) if", "sample = list(islice(merged, 5)) logger.debug( f'Merged {len(merged)} items from state", "in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges), _sort_values(backward_edges) # Build a map", "graph by its name and package name, or None for", "source_file.checksum def add_source( self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition ): #", "macro from the 'dbt' internal project \"\"\" def filter(candidate: MacroCandidate)", "RefableLookup: if self._ref_lookup is None: self._ref_lookup = RefableLookup(self) return self._ref_lookup", "is from ParseResult _disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict) _doc_lookup: Optional[DocLookup]", "AnalysisLookup(self) return self._analysis_lookup # Called by dbt.parser.manifest._resolve_refs_for_exposure # and dbt.parser.manifest._process_refs_for_node", "return cast(NonSourceCompiledNode, existing) _update_into(self.nodes, new_node) return new_node def update_exposure(self, new_exposure:", "the manifest and return the existing node. 
\"\"\" with self._lock:", "unique identifier for the user', }, ) send_anonymous_usage_stats: Optional[bool] =", "self.storage: self.storage[key] = {} self.storage[key][source.package_name] = source.unique_id def populate(self, manifest):", "is None: # This will usually happen when a node", "package: Optional[PackageName]): return find_unique_id_for_package(self.storage, key, package) def find(self, key, package:", "only build it once and avoid any concurrency issues around", "lambda x: None} ) _parsing_info: ParsingInfo = field( default_factory=ParsingInfo, metadata={'serialize':", "source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure):", "configured to send anonymous usage statistics' )), ) adapter_type: Optional[str]", "nodes: for unique_id in node.depends_on.macros: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id)", "source in manifest.sources.values(): if hasattr(source, 'source_name'): self.add_source(source) def perform_lookup( self,", "after parsing and during compilation. \"\"\" # These attributes are", "f'Source {unique_id} found in cache but not found in manifest'", "return Manifest( nodes={k: _deepcopy(v) for k, v in self.nodes.items()}, sources={k:", "each value. 
This makes output deterministic, which helps for tests.", "node.unique_id in self._disabled: self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id] = [node] def add_disabled(self,", "= field(default_factory=FileHash.empty) project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict) @dataclass class Manifest(MacroMethods,", "current = self.nodes.get(unique_id) if current and ( node.resource_type in refables", "description='A mapping from parent nodes to their dependents', )) metadata:", "self.macros = macros self.metadata = ManifestMetadata() # This is returned", "{ n.unique_id: [] for n in nodes if n.unique_id.startswith('macro') or", "None} ) _lock: Lock = field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda x:", "Optional[Callable[[MacroCandidate], bool]] = None if package is not None: def", "not compiled, update it with the new node and return", "Optional[ParsedSourceDefinition] = None for pkg in candidates: source = self.source_lookup.find(key,", "lambda x: None} ) def __pre_serialize__(self): # serialization won't work", "(source.source_name, source.name) if key not in self.storage: self.storage[key] = {}", "Optional[str] nodetypes: List[NodeType] def _matches(self, model: N) -> bool: \"\"\"Return", "not pkg_dct: return None else: return next(iter(pkg_dct.values())) elif package in", "ParsedMacro): if macro.unique_id in self.macros: # detect that the macro", "new_item # This contains macro methods that are in both", "src: Mapping[str, HasUniqueID] ): if value.unique_id in src: raise_duplicate_resource_name(value, src[value.unique_id])", "_deepcopy(value): return value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum): Core = 1 Imported =", "a big deal at all and retains consistency def __init__(self,", "- then macros defined in the root project \"\"\" filter:", "= TypeVar('K_T') V_T = TypeVar('V_T') def _expect_value( key: K_T, src:", "selected unique IDs and a writable 
manifest, update this manifest", "self.perform_lookup(unique_id, manifest) return None def add_node(self, node: ManifestNode): if node.resource_type", "import line_wrap_message from dbt import flags from dbt import tracking", "= source.unique_id def populate(self, manifest): for source in manifest.sources.values(): if", "unique_id not in selected and not adapter.get_relation( current.database, current.schema, current.identifier", "in self.macros.items()}, docs={k: _deepcopy(v) for k, v in self.docs.items()}, exposures={k:", "\"\"\"update the node with a lock. The only time we", "= node.depends_on_nodes[:] for unique_id in node.depends_on_nodes: if unique_id in forward_edges.keys():", "If the existing node is compiled, do not update the", "== model.package_name def search(self, haystack: Iterable[N]) -> Optional[N]: \"\"\"Find an", "result is not None: assert isinstance(result, ParsedSourceDefinition) return result def", "resource_type: NodeType package_name: str @property def search_name(self) -> str: raise", "this is the result :( msg = line_wrap_message( f'''\\ dbt", "{ k: v.to_dict(omit_none=False) for k, v in self.exposures.items() }, 'nodes':", "end of methods formerly in ParseResult # Provide support for", "return resource_fqns # This is called by 'parse_patch' in the", "is disabled if disabled is None: disabled = self.find_disabled_by_name( target_model_name,", "@classmethod def default(cls): return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def _sort_values(dct): \"\"\"Given", "Specificity @classmethod def from_macro( cls, candidate: MacroCandidate, specificity: Specificity )", "# Build a map of children of macros def build_macro_edges(nodes:", "package: Optional[str] = None ) -> Optional[ManifestNode]: searcher: NameSearcher =", "source if disabled is None: disabled = self.find_disabled_source_by_name( target_source_name, target_table_name,", "in manifest.docs: raise dbt.exceptions.InternalException( f'Doc {unique_id} found in 
cache but", "by 'parse_patch' in the NodePatchParser def add_patch( self, source_file: SchemaSourceFile,", "self.macros[macro.unique_id].original_file_path # subtract 2 for the \"Compilation Error\" indent #", "self.files, self.metadata, self.flat_graph, self.state_check, self.source_patches, self._disabled, self._doc_lookup, self._source_lookup, self._ref_lookup, )", "the existing node is compiled, do not update the manifest", "def build_node_edges(nodes: List[ManifestNode]): \"\"\"Build the forward and backward edges on", "patch.yaml_key == 'analyses': unique_id = self.analysis_lookup.get_unique_id(patch.name, None) else: raise dbt.exceptions.InternalException(", "CandidateList(List[M]): def last(self) -> Optional[ParsedMacro]: if not self: return None", "update a {new_item.resource_type} to have a new file ' f'path!'", "to # create the initial version of the object and", "schema_version ) from dbt.dataclass_schema import dbtClassMixin from dbt.exceptions import (", "MutableMapping[str, ParsedMacro] = field(default_factory=dict) docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict) exposures:", "formerly in ParseResult # Provide support for copy.deepcopy() - we", "if self.name != model.search_name: return False return self.package is None", "source = self.source_lookup.find(key, pkg, self) if source is not None", "for k, v in self.sources.items()}, macros={k: _deepcopy(v) for k, v", "the manifest. If the existing node is not compiled, update", "manifest.sources: raise dbt.exceptions.InternalException( f'Source {unique_id} found in cache but not", "specificity) for m in self._find_macros_by_name(full_name, project_name) ) def find_materialization_macro_by_name( self,", "its dependencies' )) ) docs: Mapping[UniqueID, ParsedDocumentation] = field( metadata=dict(description=(", "flat_graph: Dict[str, Any] = field(default_factory=dict) state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck) #", "their name. 
\"\"\" # avoid an import cycle from dbt.adapters.factory", "= self.nodes.get(unique_id) if current and ( node.resource_type in refables and", "send anonymous usage statistics' )), ) adapter_type: Optional[str] = field(", "for the object, i.e. the Manifest. # The order of", "current_project: str, node_package: str, target_package: Optional[str] = None, ) ->", "class DocLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]]", "self) if result is not None: return result return None", "adapter_type: str ) -> Optional[ParsedMacro]: candidates: CandidateList = CandidateList(chain.from_iterable( self._materialization_candidates_for(", "a Manifest. def __reduce_ex__(self, protocol): args = ( self.nodes, self.sources,", "and package name, or None for any package. The root", "usually happen when a node is disabled return # patches", "avoid an import cycle from dbt.adapters.factory import get_adapter_package_names candidates: CandidateList", "# The order of the arguments must match the order", "build_flat_graph(self): \"\"\"This attribute is used in context.common by each node,", "source.name) if key not in self.storage: self.storage[key] = {} self.storage[key][source.package_name]", "of the arguments must match the order of the attributes", "[node] def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None): self.add_disabled_nofile(node) if", "parent_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from child nodes to", "tracking.active_user.id if self.send_anonymous_usage_stats is None: self.send_anonymous_usage_stats = ( not tracking.active_user.do_not_track", "ManifestMetadata() # This is returned by the 'graph' context property", "GraphMemberNode: if unique_id in self.nodes: return self.nodes[unique_id] elif unique_id in", "for k, v in dct.items()} def build_node_edges(nodes: List[ManifestNode]): \"\"\"Build the", "seed, snapshot _lookup_types: 
ClassVar[set] = set(NodeType.refable()) # refables are actually", "self.find_disabled_by_name( target_model_name, pkg ) if disabled is not None: return", "macro \"{patch.name}\" ' f'which was not found' ) return if", "regular ones, but ignore imported packages. - if there is", "name {} (packages {} and ' '{}). dbt cannot resolve", "= 0 static_analysis_path_count: int = 0 @dataclass class ManifestStateCheck(dbtClassMixin): vars_hash:", "'The exposures defined in the dbt project and its dependencies'", "field(default_factory=FileHash.empty) project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict) @dataclass class Manifest(MacroMethods, DataClassMessagePackMixin,", "else: self._disabled[node.unique_id] = [node] def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode,", "Optional[PackageName]): return find_unique_id_for_package(self.storage, key, package) def find(self, key, package: Optional[PackageName],", "package, and type. If package is None, any package is", "_update_into(self.nodes, new_node) return new_node def update_exposure(self, new_exposure: ParsedExposure): _update_into(self.exposures, new_exposure)", "type: ignore source_file.sources.append(source.unique_id) def add_node_nofile(self, node: ManifestNodes): # nodes can't", ") return CandidateList( MaterializationCandidate.from_macro(m, specificity) for m in self._find_macros_by_name(full_name, project_name)", "node.replace(deferred=True) # log up to 5 items sample = list(islice(merged,", "manifest'.format(unique_id) ) @property def doc_lookup(self) -> DocLookup: if self._doc_lookup is", "imported macros - then macros defined in the root project", "resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {} all_resources = chain(self.exposures.values(), self.nodes.values(),", "raise dbt.exceptions.RuntimeException( f'got an update_{new_item.resource_type} call with an ' f'unrecognized", ")) ) selectors: Mapping[UniqueID, Any] = field( metadata=dict(description=( 'The 
selectors", "# This contains macro methods that are in both the", "current_project: str, node_package: str ) -> MaybeParsedSource: key = (target_source_name,", "for pkg in candidates: node = self.ref_lookup.find(target_model_name, pkg, self) if", "candidates: CandidateList = self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name, # filter out imported", "{} return self @classmethod def __post_deserialize__(cls, obj): obj._lock = flags.MP_CONTEXT.Lock()", "dbt project and its dependencies' )) ) docs: Mapping[UniqueID, ParsedDocumentation]", "bool: key = source_file.file_id if key is None: return False", "value, but doing 3 dict lookups instead of 1 #", "= Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]] MaybeNonSource = Optional[Union[ ManifestNode, Disabled[ManifestNode]", "else: raise dbt.exceptions.InternalException( f'Unexpected yaml_key {patch.yaml_key} for patch in '", "= tracking.active_user.id if self.send_anonymous_usage_stats is None: self.send_anonymous_usage_stats = ( not", "None} ) _parsing_info: ParsingInfo = field( default_factory=ParsingInfo, metadata={'serialize': lambda x:", "= field(metadata=dict( description='A mapping from parent nodes to their dependents',", "based ' 'on cached file information: {}!' 
.format(key, name, old_file)", "in resource_fqns: resource_fqns[resource_type_plural] = set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns # This", "candidate: MacroCandidate, specificity: Specificity ) -> 'MaterializationCandidate': return cls( locality=candidate.locality,", "-> bool: if not isinstance(other, MacroCandidate): return NotImplemented return self.locality", "dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def _sort_values(dct): \"\"\"Given a dictionary, sort each value.", "MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict) # following is from ParseResult _disabled:", ") def _sort_values(dct): \"\"\"Given a dictionary, sort each value. This", "AnySourceFile] = field(default_factory=dict) metadata: ManifestMetadata = field(default_factory=ManifestMetadata) flat_graph: Dict[str, Any]", "two nodes must have the same original file path. \"\"\"", "{} AnyManifest = Union[Manifest, MacroManifest] @dataclass @schema_version('manifest', 2) class WritableManifest(ArtifactMixin):", "str, ) -> MaybeNonSource: node: Optional[ManifestNode] = None disabled: Optional[ManifestNode]", "instead of 1 # is not a big deal at", "object and a tuple of arguments # for the object,", "# subtract 2 for the \"Compilation Error\" indent # note", "existing_file_path) source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch) def add_macro_patch( self, source_file: SchemaSourceFile, patch:", "project \"\"\" def filter(candidate: MacroCandidate) -> bool: return candidate.locality !=", "MaybeNonSource: node: Optional[ManifestNode] = None disabled: Optional[ManifestNode] = None candidates", "manifest.docs.values(): self.add_doc(doc) def perform_lookup( self, unique_id: UniqueID, manifest ) ->", "ArtifactMixin, schema_version ) from dbt.dataclass_schema import dbtClassMixin from dbt.exceptions import", "return _sort_values(forward_edges), _sort_values(backward_edges) # Build a map of children of", "macros 
are similar to regular ones, but ignore imported packages.", "each node, so we want to only build it once", "docs are always enabled. \"\"\" candidates = _search_packages( current_project, node_package,", "project_name: str, materialization_name: str, adapter_type: str ) -> Optional[ParsedMacro]: candidates:", "adapter_type: Optional[str] = field( default=None, metadata=dict(description='The type name of the", "= source # type: ignore source_file.sources.append(source.unique_id) def add_node_nofile(self, node: ManifestNodes):", "not None and node.config.enabled: return node # it's possible that", "str def find_unique_id_for_package(storage, key, package: Optional[PackageName]): if key not in", "key, package) def find(self, key, package: Optional[PackageName], manifest: 'Manifest'): unique_id", "# serialization won't work with anything except an empty source_patches", "import ( CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode ) from dbt.contracts.graph.parsed import", "the # args tuple in the right position. nodes: MutableMapping[str,", "if macro.patch_path: package_name, existing_file_path = macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name] =", "raise_compiler_error(msg) self.macros[macro.unique_id] = macro source_file.macros.append(macro.unique_id) def has_file(self, source_file: SourceFile) ->", "call this until you're done with building your manifest! 
\"\"\"", "in candidates: result = self.doc_lookup.find(name, pkg, self) if result is", "class MacroManifest(MacroMethods): def __init__(self, macros): self.macros = macros self.metadata =", "= forward_edges self.parent_map = backward_edges def build_macro_child_map(self): edge_members = list(chain(", "manifest): for node in manifest.nodes.values(): self.add_node(node) def perform_lookup( self, unique_id:", "self, component: str, root_project_name: str ) -> Optional[ParsedMacro]: \"\"\" The", "None: # This will usually happen when a node is", "update it with the new node and return that. If", "build_macro_edges(nodes: List[Any]): forward_edges: Dict[str, List[str]] = { n.unique_id: [] for", "{ k: v.to_dict(omit_none=False) for k, v in self.sources.items() } }", "def build_flat_graph(self): \"\"\"This attribute is used in context.common by each", "def __post_init__(self): if tracking.active_user is None: return if self.user_id is", "if node: if node.patch_path: package_name, existing_file_path = node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path)", "existing_file_path) source_file.macro_patches[patch.name] = unique_id macro.patch(patch) def add_source_patch( self, source_file: SchemaSourceFile,", "_sort_values(forward_edges) def _deepcopy(value): return value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum): Core = 1", "return False return False @dataclass class MaterializationCandidate(MacroCandidate): specificity: Specificity @classmethod", "class Disabled(Generic[D]): target: D MaybeDocumentation = Optional[ParsedDocumentation] MaybeParsedSource = Optional[Union[", "Dict[str, List[str]] = {} # pre-populate the forward edge dict", "next(iter(pkg_dct.values())) elif package in pkg_dct: return pkg_dct[package] else: return None", "def add_patch( self, source_file: SchemaSourceFile, patch: ParsedNodePatch, ) -> None:", "N = TypeVar('N', bound=Searchable) @dataclass class NameSearcher(Generic[N]): name: str package:", 
"'deserialize': lambda x: None} ) def __pre_serialize__(self): # serialization won't", "k, v in self.exposures.items() }, 'nodes': { k: v.to_dict(omit_none=False) for", "lambda x: None, 'deserialize': lambda x: None} ) _lock: Lock", "dbt import tracking import dbt.utils NodeEdgeMap = Dict[str, List[str]] PackageName", "manifest): for source in manifest.sources.values(): if hasattr(source, 'source_name'): self.add_source(source) def", "when a node is disabled return # patches can't be", "= build_macro_edges(edge_members) return forward_edges def writable_manifest(self): self.build_parent_and_child_maps() return WritableManifest( nodes=self.nodes,", "def resolve_source( self, target_source_name: str, target_table_name: str, current_project: str, node_package:", "import enum from dataclasses import dataclass, field from itertools import", "import PathSet from dbt.logger import GLOBAL_LOGGER as logger from dbt.node_types", "- {macro.original_file_path} - {other_path} ''', subtract=2 ) raise_compiler_error(msg) self.macros[macro.unique_id] =", "# type: ignore source_file.sources.append(source.unique_id) def add_node_nofile(self, node: ManifestNodes): # nodes", "Optional[ParsedMacro]: if not self: return None self.sort() return self[-1].macro def", "node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch) def add_macro_patch( self, source_file:", "ensure it's empty self.source_patches = {} return self @classmethod def", "class WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID, ManifestNode] = field( metadata=dict(description=( 'The nodes", "doing 3 dict lookups instead of 1 # is not", "Optional[str] = field( default=None, metadata=dict(description='The type name of the adapter'),", "= 3 class Specificity(enum.IntEnum): Default = 1 Adapter = 2", "at dest[new_item.unique_id] with new_itme. 
There must be an existing value", "\"\"\" # avoid an import cycle from dbt.adapters.factory import get_adapter_package_names", "MaterializationCandidate): return NotImplemented equal = ( self.specificity == other.specificity and", "is None: disabled = self.find_disabled_by_name( target_model_name, pkg ) if disabled", "find_disabled_source_by_name( self, source_name: str, table_name: str, package: Optional[str] = None", "metadata: ManifestMetadata = field(metadata=dict( description='Metadata about the manifest', )) def", "disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map, ) def write(self, path): self.writable_manifest().write(path) # Called", "container of NodeTypes that implements the 'in' operator. \"\"\" if", "x: None, 'deserialize': lambda x: None} ) _parsing_info: ParsingInfo =", "filter=filter, ) return candidates.last() def find_generate_macro_by_name( self, component: str, root_project_name:", "and a writable manifest, update this manifest by replacing any", "of the disabled nodes in the target' )) parent_map: Optional[NodeEdgeMap]", "that the line wrap eats newlines, so if you want", "= storage[key] if package is None: if not pkg_dct: return", "and backward edges on the given list of ParsedNodes and", "= f'macro.{patch.package_name}.{patch.name}' macro = self.macros.get(unique_id) if not macro: warn_or_error( f'WARNING:", "raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name, ) from dbt.helper_types import PathSet from dbt.logger import", "ManifestNode, Disabled[ManifestNode] ]] T = TypeVar('T', bound=GraphMemberNode) def _update_into(dest: MutableMapping[str,", "_deepcopy(v) for k, v in self.sources.items()}, macros={k: _deepcopy(v) for k,", "in self.disabled], files={k: _deepcopy(v) for k, v in self.files.items()}, state_check=_deepcopy(self.state_check),", "None if package is not None: def filter(candidate: MacroCandidate) ->", "self.ref_lookup.find(target_model_name, pkg, self) if node is not None and 
node.config.enabled:", "None def add_doc(self, doc: ParsedDocumentation): if doc.name not in self.storage:", "for k, v in self.sources.items() } } def find_disabled_by_name( self,", "are always enabled. \"\"\" candidates = _search_packages( current_project, node_package, package", "self.exposures, self.selectors, self.disabled, self.files, self.metadata, self.flat_graph, self.state_check, self.source_patches, self._disabled, self._doc_lookup,", "Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar,", "in ' f'file {source_file.path.original_file_path}' ) if unique_id is None: #", "search_name(self) -> str: raise NotImplementedError('search_name not implemented') N = TypeVar('N',", ")) child_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from parent nodes", "from dataclasses import dataclass, field from itertools import chain, islice", "node: CompileResultNode, test_from=None): self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id,", "two separate dictionaries, each mapping unique IDs to lists of", "} for node in nodes: for unique_id in node.depends_on.macros: if", "source_file: AnySourceFile, node: ManifestNodes, test_from=None): self.add_node_nofile(node) if isinstance(source_file, SchemaSourceFile): assert", "refables = set(NodeType.refable()) merged = set() for unique_id, node in", "Dict[str, List[str]] = { n.unique_id: [] for n in nodes", "= _search_packages( current_project, node_package, package ) for pkg in candidates:", "macros are fully namespaced unique_id = f'macro.{patch.package_name}.{patch.name}' macro = self.macros.get(unique_id)", "out imported packages filter=filter, ) return candidates.last() def _find_macros_by_name( self,", "value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum): Core = 1 Imported = 2 Root", "= set(NodeType.refable()) merged = set() for unique_id, node in other.nodes.items():", "T = 
TypeVar('T', bound=GraphMemberNode) def _update_into(dest: MutableMapping[str, T], new_item: T):", "patch: SourcePatch, ) -> None: # source patches must be", "chain(self.nodes.values(), self.sources.values()) ) # This is used in dbt.task.rpc.sql_commands 'add_new_refs'", "new_itme. There must be an existing value to overwrite, and", "current_project, node_package, target_model_package ) for pkg in candidates: node =", "macro: ParsedMacro def __eq__(self, other: object) -> bool: if not", "in self.nodes.items() }, 'sources': { k: v.to_dict(omit_none=False) for k, v", "self.storage[doc.name] = {} self.storage[doc.name][doc.package_name] = doc.unique_id def populate(self, manifest): for", "package) def find(self, key, package: Optional[PackageName], manifest: 'Manifest'): unique_id =", "raise_duplicate_source_patch_name, ) from dbt.helper_types import PathSet from dbt.logger import GLOBAL_LOGGER", "Root = 3 class Specificity(enum.IntEnum): Default = 1 Adapter =", "forward_edges: Dict[str, List[str]] = { n.unique_id: [] for n in", "candidates.last() def find_generate_macro_by_name( self, component: str, root_project_name: str ) ->", "manifest. If the existing node is not compiled, update it", ") -> 'MaterializationCandidate': return cls( locality=candidate.locality, macro=candidate.macro, specificity=specificity, ) def", "'add_new_refs' def deepcopy(self): return Manifest( nodes={k: _deepcopy(v) for k, v", "-> List[Optional[str]]: if target_package is not None: return [target_package] elif", "-> MaybeParsedSource: key = (target_source_name, target_table_name) candidates = _search_packages(current_project, node_package)", "if key is None: return False if key not in", "[current_project, None] else: return [current_project, node_package, None] @dataclass class ManifestMetadata(BaseArtifactMetadata):", "name and package name, or None for any package. 
The", "object) -> bool: if not isinstance(other, MacroCandidate): return NotImplemented return", "consistent view of the manifest. If the existing node is", "is compiled, do not update the manifest and return the", "= self.macros.get(unique_id) if not macro: warn_or_error( f'WARNING: Found documentation for", "the dbt project and its dependencies' )) ) exposures: Mapping[UniqueID,", "return result def find_disabled_source_by_name( self, source_name: str, table_name: str, package:", "\"\"\"This attribute is used in context.common by each node, so", "= Specificity.Default else: specificity = Specificity.Adapter full_name = dbt.utils.get_materialization_macro_name( materialization_name=materialization_name,", "in cached \"result.{}\" based ' 'on cached file information: {}!'", "not self: return None self.sort() return self[-1].macro def _get_locality( macro:", "not None: return self.perform_lookup(unique_id, manifest) return None def add_source(self, source:", "NameSearcher(Generic[N]): name: str package: Optional[str] nodetypes: List[NodeType] def _matches(self, model:", "TypeVar('V_T') def _expect_value( key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile,", "ParsedExposure] = field(default_factory=dict) selectors: MutableMapping[str, Any] = field(default_factory=dict) disabled: List[CompileResultNode]", "def get_used_databases(self): return frozenset( x.database for x in chain(self.nodes.values(), self.sources.values())", "macros={k: _deepcopy(v) for k, v in self.macros.items()}, docs={k: _deepcopy(v) for", "in self.exposures.items()}, selectors={k: _deepcopy(v) for k, v in self.selectors.items()}, metadata=self.metadata,", "'The macros defined in the dbt project and its dependencies'", "selected and not adapter.get_relation( current.database, current.schema, current.identifier ) ): merged.add(unique_id)", "methods formerly in ParseResult # Provide support for copy.deepcopy() -", "# This is returned by the 'graph' context property #", "HasUniqueID, src: 
Mapping[str, HasUniqueID] ): if value.unique_id in src: raise_duplicate_resource_name(value,", "dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, ) return CandidateList( MaterializationCandidate.from_macro(m, specificity) for", "class NameSearcher(Generic[N]): name: str package: Optional[str] nodetypes: List[NodeType] def _matches(self,", "method in the # args tuple in the right position.", "retains consistency def __init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]]", "@dataclass class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for the manifest.\"\"\" dbt_schema_version: str =", "name: str, root_project_name: str, package: Optional[str] ) -> Optional[ParsedMacro]: \"\"\"Find", "source_lookup(self) -> SourceLookup: if self._source_lookup is None: self._source_lookup = SourceLookup(self)", "= field(default_factory=dict) docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict) exposures: MutableMapping[str, ParsedExposure]", "True if self.specificity > other.specificity: return False if self.locality <", "manifest ) -> ManifestNode: if unique_id not in manifest.nodes: raise", "-> Optional[ParsedSourceDefinition]: search_name = f'{source_name}.{table_name}' searcher: NameSearcher = NameSearcher( search_name,", "return pkg_dct[package] else: return None class DocLookup(dbtClassMixin): def __init__(self, manifest:", "class MacroMethods: # Just to make mypy happy. There must", "Optional[PackageName]): if key not in storage: return None pkg_dct: Mapping[PackageName,", "cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def _sort_values(dct): \"\"\"Given a dictionary, sort each", "package is allowed. 
nodetypes should be a container of NodeTypes", "backward_edges[node.unique_id] = node.depends_on_nodes[:] for unique_id in node.depends_on_nodes: if unique_id in", "for copy.deepcopy() - we just need to avoid the lock!", "disabled=[_deepcopy(n) for n in self.disabled], files={k: _deepcopy(v) for k, v", "the forward and backward edges on the given list of", "snapshot _lookup_types: ClassVar[set] = set(NodeType.refable()) # refables are actually unique,", "self.exposures.items()}, selectors={k: _deepcopy(v) for k, v in self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n)", "Dict[str, Set[Tuple[str, ...]]] = {} all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values())", "from the ParseResult object source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict) #", "nodes defined in the dbt project and its dependencies' ))", "ManifestNodes, test_from=None): self.add_node_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from)", "dbt.contracts.util import ( BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version ) from dbt.dataclass_schema", "other: object) -> bool: if not isinstance(other, MacroCandidate): return NotImplemented", "} } def find_disabled_by_name( self, name: str, package: Optional[str] =", "which helps for tests. 
\"\"\" return {k: sorted(v) for k,", "sources: Mapping[UniqueID, ParsedSourceDefinition] = field( metadata=dict(description=( 'The sources defined in", "happened raise dbt.exceptions.InternalException( 'Expected node {} not found in manifest'.format(unique_id)", "if not isinstance(other, MaterializationCandidate): return NotImplemented if self.specificity < other.specificity:", "dependency, and we want them to have a consistent view", "v.to_dict(omit_none=False) for k, v in self.nodes.items() }, 'sources': { k:", "self.macros = [] self.metadata = {} def find_macro_by_name( self, name:", "# Called by RunTask.defer_to_manifest def merge_from_artifact( self, adapter, other: 'WritableManifest',", "a container of NodeTypes that implements the 'in' operator. \"\"\"", "FileHash] = field(default_factory=dict) @dataclass class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): \"\"\"The manifest", "\"\"\" with self._lock: existing = self.nodes[new_node.unique_id] if getattr(existing, 'compiled', False):", "if node.unique_id in self._disabled: self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id] = [node] def", "and retains consistency def __init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName,", "field( default_factory=lambda: str(WritableManifest.dbt_schema_version) ) project_id: Optional[str] = field( default=None, metadata={", "work with anything except an empty source_patches because # tuple", "key not in storage: return None pkg_dct: Mapping[PackageName, UniqueID] =", "def update_exposure(self, new_exposure: ParsedExposure): _update_into(self.exposures, new_exposure) def update_node(self, new_node: ManifestNode):", "order of the attributes # in the Manifest class declaration,", "in the Manifest class declaration, because they are used as", "BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version ) from dbt.dataclass_schema import dbtClassMixin from", "for unique_id in node.depends_on.macros: if 
unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return", "of 1 # is not a big deal at all", "# macros are fully namespaced unique_id = f'macro.{patch.package_name}.{patch.name}' macro =", "= SourceLookup(self) return self._source_lookup def rebuild_source_lookup(self): self._source_lookup = SourceLookup(self) @property", "str RefName = str UniqueID = str def find_unique_id_for_package(storage, key,", "copy.deepcopy() - we just need to avoid the lock! #", "unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges), _sort_values(backward_edges) # Build a", "self.nodes[node.unique_id] = node def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None):", "def perform_lookup( self, unique_id: UniqueID, manifest ) -> ParsedDocumentation: if", "if disabled is not None: return Disabled(disabled) return None #", "None ) -> Optional[ManifestNode]: searcher: NameSearcher = NameSearcher( name, package,", "by its name and package name, or None for any", "= None ) -> CandidateList: \"\"\"Find macros by their name.", "return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def _sort_values(dct): \"\"\"Given a dictionary, sort", "'sources': { k: v.to_dict(omit_none=False) for k, v in self.sources.items() }", "candidates.last() def get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]]", "unique_id in self.nodes: return self.nodes[unique_id] elif unique_id in self.sources: return", "def _deepcopy(value): return value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum): Core = 1 Imported", "filter(candidate: MacroCandidate) -> bool: return package == candidate.macro.package_name candidates: CandidateList", "UniqueID, manifest ) -> ManifestNode: if unique_id not in manifest.nodes:", "add_source_patch( self, source_file: SchemaSourceFile, patch: SourcePatch, ) -> None: #", 
"get_used_schemas(self, resource_types=None): return frozenset({ (node.database, node.schema) for node in chain(self.nodes.values(),", "-> Optional[N]: \"\"\"Find an entry in the given iterable by", "exposures=self.exposures, selectors=self.selectors, metadata=self.metadata, disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map, ) def write(self, path):", "so ensure it's empty self.source_patches = {} return self @classmethod", "None, ) -> List[Optional[str]]: if target_package is not None: return", "isinstance(result, ParsedSourceDefinition) return result def _materialization_candidates_for( self, project_name: str, materialization_name:", "in self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key] = patch source_file.source_patches.append(key) def get_used_schemas(self,", "is not a big deal at all and retains consistency", "assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_doc(self, source_file: SourceFile,", "{n.unique_id: [] for n in nodes} for node in nodes:", "the 'in' operator. \"\"\" if model.resource_type not in self.nodetypes: return", "dict for simplicity forward_edges: Dict[str, List[str]] = {n.unique_id: [] for", "self.nodes.items()}, sources={k: _deepcopy(v) for k, v in self.sources.items()}, macros={k: _deepcopy(v)", "macros defined in the root project \"\"\" filter: Optional[Callable[[MacroCandidate], bool]]", "in self.files.items()}, state_check=_deepcopy(self.state_check), ) def build_parent_and_child_maps(self): edge_members = list(chain( self.nodes.values(),", "use this. It returns a callable object used to #", "_check_duplicates( value: HasUniqueID, src: Mapping[str, HasUniqueID] ): if value.unique_id in", "with the name {} (packages {} and ' '{}). 
dbt", "or filter(candidate): candidates.append(candidate) return candidates @dataclass class ParsingInfo: static_analysis_parsed_path_count: int", "v in self.macros.items()}, docs={k: _deepcopy(v) for k, v in self.docs.items()},", "self.writable_manifest().write(path) # Called in dbt.compilation.Linker.write_graph and # dbt.graph.queue.get and ._include_in_cost", ") _source_lookup: Optional[SourceLookup] = field( default=None, metadata={'serialize': lambda x: None,", "is not None: return self.perform_lookup(unique_id, manifest) return None def add_doc(self,", "current_project: str, node_package: str, ) -> MaybeNonSource: node: Optional[ManifestNode] =", "= ( self.specificity == other.specificity and self.locality == other.locality )", "callable object used to # create the initial version of", "None, 'deserialize': lambda x: None} ) _analysis_lookup: Optional[AnalysisLookup] = field(", "self._ref_lookup, ) return self.__class__, args class MacroManifest(MacroMethods): def __init__(self, macros):", "List[str]] PackageName = str DocName = str RefName = str", "until you're done with building your manifest! 
\"\"\" self.flat_graph =", "disabled return # patches can't be overwritten node = self.nodes.get(unique_id)", "{} # pre-populate the forward edge dict for simplicity forward_edges:", "_lookup_types: ClassVar[set] = set(NodeType.refable()) # refables are actually unique, so", "ManifestStateCheck = field(default_factory=ManifestStateCheck) # Moved from the ParseResult object source_patches:", "raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch) def add_macro_patch( self, source_file: SchemaSourceFile,", "node = self.ref_lookup.find(target_model_name, pkg, self) if node is not None", ") docs: Mapping[UniqueID, ParsedDocumentation] = field( metadata=dict(description=( 'The docs defined", "raise dbt.exceptions.RuntimeException( f'cannot update a {new_item.resource_type} to have a new", "current.identifier ) ): merged.add(unique_id) self.nodes[unique_id] = node.replace(deferred=True) # log up", "unique_id in self.sources: return self.sources[unique_id] elif unique_id in self.exposures: return", "implemented') N = TypeVar('N', bound=Searchable) @dataclass class NameSearcher(Generic[N]): name: str", "not in selected and not adapter.get_relation( current.database, current.schema, current.identifier )", "arguments must match the order of the attributes # in", "'The docs defined in the dbt project and its dependencies'", "not None: return [target_package] elif current_project == node_package: return [current_project,", "candidates: CandidateList = CandidateList() packages = set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id, macro", "pkg in candidates: node = self.ref_lookup.find(target_model_name, pkg, self) if node", "the existing node. 
\"\"\" with self._lock: existing = self.nodes[new_node.unique_id] if", "f'{source_name}.{table_name}' searcher: NameSearcher = NameSearcher( search_name, package, [NodeType.Source] ) result", "this ambiguity' .format(self.macro.name, self.macro.package_name, other.macro.package_name) ) return equal def __lt__(self,", "''', subtract=2 ) raise_compiler_error(msg) self.macros[macro.unique_id] = macro source_file.macros.append(macro.unique_id) def has_file(self,", "match the order of the attributes # in the Manifest", "overwrite whatever is at dest[new_item.unique_id] with new_itme. There must be", "package_name, existing_file_path = node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch) def", "value: HasUniqueID, src: Mapping[str, HasUniqueID] ): if value.unique_id in src:", "str, node_package: str, ) -> MaybeNonSource: node: Optional[ManifestNode] = None", "f'Merged {len(merged)} items from state (sample: {sample})' ) # Methods", "Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic,", "docs={k: _deepcopy(v) for k, v in self.docs.items()}, exposures={k: _deepcopy(v) for", "docs defined in the dbt project and its dependencies' ))", "the project', }, ) user_id: Optional[UUID] = field( default=None, metadata={", "candidate.macro.package_name candidates: CandidateList = self._find_macros_by_name( name=name, root_project_name=root_project_name, filter=filter, ) return", ") -> None: if patch.yaml_key in ['models', 'seeds', 'snapshots']: unique_id", "from typing import ( Dict, List, Optional, Union, Mapping, MutableMapping,", "in dest: raise dbt.exceptions.RuntimeException( f'got an update_{new_item.resource_type} call with an", "compilation. 
\"\"\" # These attributes are both positional and by", ")) forward_edges, backward_edges = build_node_edges(edge_members) self.child_map = forward_edges self.parent_map =", "self.source_patches, self._disabled, self._doc_lookup, self._source_lookup, self._ref_lookup, ) return self.__class__, args class", "the `generate_{component}_name` macro from the 'dbt' internal project \"\"\" def", "dependencies', )) child_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from parent", "not in self.storage: self.storage[node.name] = {} self.storage[node.name][node.package_name] = node.unique_id def", "ParsedMacro] = field( metadata=dict(description=( 'The macros defined in the dbt", "ignore imported packages. - if there is a `generate_{component}_name` macro", "in the given iterable by name.\"\"\" for model in haystack:", "UniqueID]] = {} self.populate(manifest) def get_unique_id(self, key, package: Optional[PackageName]): return", ")) parent_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from child nodes", "\"\"\" filter: Optional[Callable[[MacroCandidate], bool]] = None if package is not", "manifest.nodes[unique_id] class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] = set(NodeType.Analysis) def _search_packages( current_project:", "implements the 'in' operator. 
\"\"\" if model.resource_type not in self.nodetypes:", "build_parent_and_child_maps(self): edge_members = list(chain( self.nodes.values(), self.sources.values(), self.exposures.values(), )) forward_edges, backward_edges", ") if disabled is not None: return Disabled(disabled) return None", "metadata=dict(description=( 'The exposures defined in the dbt project and its", "field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}", "set(NodeType.refable()) # refables are actually unique, so the Dict[PackageName, UniqueID]", "return {k: sorted(v) for k, v in dct.items()} def build_node_edges(nodes:", "node is disabled return # patches can't be overwritten node", "-> must be a NonSourceCompiledNode return cast(NonSourceCompiledNode, existing) _update_into(self.nodes, new_node)", "exposures={k: _deepcopy(v) for k, v in self.exposures.items()}, selectors={k: _deepcopy(v) for", "sources=self.sources, macros=self.macros, docs=self.docs, exposures=self.exposures, selectors=self.selectors, metadata=self.metadata, disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map, )", "not isinstance(other, MaterializationCandidate): return NotImplemented if self.specificity < other.specificity: return", "MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict) macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict) docs:", "for n in self.disabled], files={k: _deepcopy(v) for k, v in", "Optional[ParsedDocumentation]: \"\"\"Resolve the given documentation. 
This follows the same algorithm", "lambda x: None} ) _analysis_lookup: Optional[AnalysisLookup] = field( default=None, metadata={'serialize':", "node_package) source: Optional[ParsedSourceDefinition] = None disabled: Optional[ParsedSourceDefinition] = None for", "source_file.source_patches.append(key) def get_used_schemas(self, resource_types=None): return frozenset({ (node.database, node.schema) for node", "import SourcePatch from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile from", "is not None: return Disabled(disabled) return None # Called by", "field from itertools import chain, islice from mashumaro import DataClassMessagePackMixin", "during compilation. \"\"\" # These attributes are both positional and", "unique_id is None: # This will usually happen when a", "a callable object used to # create the initial version", "project and its dependencies' )) ) macros: Mapping[UniqueID, ParsedMacro] =", "_expect_value( key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str", "merged.add(unique_id) self.nodes[unique_id] = node.replace(deferred=True) # log up to 5 items", "DocLookup: if self._doc_lookup is None: self._doc_lookup = DocLookup(self) return self._doc_lookup", "None] @dataclass class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for the manifest.\"\"\" dbt_schema_version: str", "self._source_lookup = SourceLookup(self) return self._source_lookup def rebuild_source_lookup(self): self._source_lookup = SourceLookup(self)", "None: disabled = self.find_disabled_source_by_name( target_source_name, target_table_name, pkg ) if disabled", "# and dbt.parser.manifest._process_refs_for_node def resolve_ref( self, target_model_name: str, target_model_package: Optional[str],", "( node.resource_type in refables and not node.is_ephemeral and unique_id not", "0 @dataclass class ManifestStateCheck(dbtClassMixin): vars_hash: FileHash = field(default_factory=FileHash.empty) profile_hash: FileHash", "set(NodeType.Analysis) 
def _search_packages( current_project: str, node_package: str, target_package: Optional[str] =", "= field(default_factory=list) files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) metadata: ManifestMetadata =", "self.selectors, self.disabled, self.files, self.metadata, self.flat_graph, self.state_check, self.source_patches, self._disabled, self._doc_lookup, self._source_lookup,", "it's possible that the node is disabled if disabled is", "is returned by the 'graph' context property # in the", "is allowed. nodetypes should be a container of NodeTypes that", "result = self.doc_lookup.find(name, pkg, self) if result is not None:", "None candidates = _search_packages( current_project, node_package, target_model_package ) for pkg", "self._analysis_lookup is None: self._analysis_lookup = AnalysisLookup(self) return self._analysis_lookup # Called", "None # Called by dbt.parser.manifest._resolve_sources_for_exposure # and dbt.parser.manifest._process_source_for_node def resolve_source(", "'Manifest' ) -> ParsedSourceDefinition: if unique_id not in manifest.sources: raise", "for atype in (adapter_type, None) )) return candidates.last() def get_resource_fqns(self)", ") return equal def __lt__(self, other: object) -> bool: if", ":( msg = line_wrap_message( f'''\\ dbt found two macros named", ") result = searcher.search(self.disabled) return result def find_disabled_source_by_name( self, source_name:", "its dependencies' )) ) macros: Mapping[UniqueID, ParsedMacro] = field( metadata=dict(description=(", "as docs are always enabled. 
\"\"\" candidates = _search_packages( current_project,", "v in self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n) for n in self.disabled], files={k:", "for m in self._find_macros_by_name(full_name, project_name) ) def find_materialization_macro_by_name( self, project_name:", "dbt.exceptions.RuntimeException( f'got an update_{new_item.resource_type} call with an ' f'unrecognized {new_item.resource_type}:", "return candidates.last() def get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str,", "not in self.nodetypes: return False if self.name != model.search_name: return", "node: if node.patch_path: package_name, existing_file_path = node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key,", "return self.exposures[unique_id] else: # something terrible has happened raise dbt.exceptions.InternalException(", "Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {} self.populate(manifest) def get_unique_id(self, key,", "source_file: SchemaSourceFile, patch: SourcePatch, ) -> None: # source patches", "then macros defined in the root project \"\"\" filter: Optional[Callable[[MacroCandidate],", "candidates: CandidateList = self._find_macros_by_name( name=name, root_project_name=root_project_name, filter=filter, ) return candidates.last()", ") return candidates.last() def find_generate_macro_by_name( self, component: str, root_project_name: str", "dest[unique_id] = new_item # This contains macro methods that are", "metadata: ManifestMetadata = field(default_factory=ManifestMetadata) flat_graph: Dict[str, Any] = field(default_factory=dict) state_check:", ") -> MaybeNonSource: node: Optional[ManifestNode] = None disabled: Optional[ManifestNode] =", "by the 'graph' context property # in the ProviderContext class.", "raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, 
raise_duplicate_source_patch_name, ) from dbt.helper_types import", "str) -> GraphMemberNode: if unique_id in self.nodes: return self.nodes[unique_id] elif", "None and node.config.enabled: return node # it's possible that the", "field(default_factory=dict) # following is from ParseResult _disabled: MutableMapping[str, List[CompileResultNode]] =", "find_generate_macro_by_name( self, component: str, root_project_name: str ) -> Optional[ParsedMacro]: \"\"\"", "node = self.nodes.get(unique_id) if node: if node.patch_path: package_name, existing_file_path =", "= self.ref_lookup.get_unique_id(patch.name, None) elif patch.yaml_key == 'analyses': unique_id = self.analysis_lookup.get_unique_id(patch.name,", "def search_name(self) -> str: raise NotImplementedError('search_name not implemented') N =", "str package: Optional[str] nodetypes: List[NodeType] def _matches(self, model: N) ->", "following is from ParseResult _disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict) _doc_lookup:", "return my_checksum == source_file.checksum def add_source( self, source_file: SchemaSourceFile, source:", "'The nodes defined in the dbt project and its dependencies'", "n.unique_id.startswith('macro') or n.depends_on.macros } for node in nodes: for unique_id", "Any] = field(default_factory=dict) state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck) # Moved from", "forward_edges = build_macro_edges(edge_members) return forward_edges def writable_manifest(self): self.build_parent_and_child_maps() return WritableManifest(", "find_materialization_macro_by_name( self, project_name: str, materialization_name: str, adapter_type: str ) ->", "Optional[str], current_project: str, node_package: str, ) -> MaybeNonSource: node: Optional[ManifestNode]", "self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id] = [node] def add_disabled(self, source_file: AnySourceFile, node:", "doc: ParsedDocumentation): _check_duplicates(doc, 
self.docs) self.docs[doc.unique_id] = doc source_file.docs.append(doc.unique_id) # end", "expect(self, unique_id: str) -> GraphMemberNode: if unique_id in self.nodes: return", "\"\"\" refables = set(NodeType.refable()) merged = set() for unique_id, node", "if value.unique_id in src: raise_duplicate_resource_name(value, src[value.unique_id]) K_T = TypeVar('K_T') V_T", "patch: ParsedMacroPatch, ) -> None: # macros are fully namespaced", "UniqueID, manifest: 'Manifest' ) -> ParsedSourceDefinition: if unique_id not in", "unique_id = new_item.unique_id if unique_id not in dest: raise dbt.exceptions.RuntimeException(", "self.package == model.package_name def search(self, haystack: Iterable[N]) -> Optional[N]: \"\"\"Find", "an attribute # is added it must all be added", "== other.locality def __lt__(self, other: object) -> bool: if not", "package: Optional[str] ) -> Optional[ParsedMacro]: \"\"\"Find a macro in the", "ParsedDocumentation] = field( metadata=dict(description=( 'The docs defined in the dbt", "has_file(self, source_file: SourceFile) -> bool: key = source_file.file_id if key", "if not macro: warn_or_error( f'WARNING: Found documentation for macro \"{patch.name}\"", "and return the existing node. 
\"\"\" with self._lock: existing =", "empty self.source_patches = {} return self @classmethod def __post_deserialize__(cls, obj):", "source_file: SchemaSourceFile, patch: ParsedNodePatch, ) -> None: if patch.yaml_key in", "None: def filter(candidate: MacroCandidate) -> bool: return package == candidate.macro.package_name", "field( metadata=dict(description=( 'The docs defined in the dbt project and", "'description': 'A unique identifier for the user', }, ) send_anonymous_usage_stats:", "candidates.append(candidate) return candidates @dataclass class ParsingInfo: static_analysis_parsed_path_count: int = 0", "cannot resolve this ambiguity' .format(self.macro.name, self.macro.package_name, other.macro.package_name) ) return equal", "self.locality > other.locality: return False return False M = TypeVar('M',", "\"\"\" def filter(candidate: MacroCandidate) -> bool: return candidate.locality != Locality.Imported", "field(default_factory=dict) exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict) selectors: MutableMapping[str, Any] =", "Any] = field(default_factory=dict) disabled: List[CompileResultNode] = field(default_factory=list) files: MutableMapping[str, AnySourceFile]", "= self.nodes.get(unique_id) if node: if node.patch_path: package_name, existing_file_path = node.patch_path.split('://')", "package_name, existing_file_path = macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name] = unique_id macro.patch(patch)", "in candidates: node = self.ref_lookup.find(target_model_name, pkg, self) if node is", "Methods that were formerly in ParseResult def add_macro(self, source_file: SourceFile,", "class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] = set(NodeType.Analysis) def _search_packages( current_project: str,", "new file ' f'path!' 
) dest[unique_id] = new_item # This", "def rebuild_doc_lookup(self): self._doc_lookup = DocLookup(self) @property def source_lookup(self) -> SourceLookup:", "'The selectors defined in selectors.yml' )) ) disabled: Optional[List[CompileResultNode]] =", "manifest' ) return manifest.docs[unique_id] class SourceLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'):", "chain, islice from mashumaro import DataClassMessagePackMixin from multiprocessing.synchronize import Lock", "import get_adapter_package_names candidates: CandidateList = CandidateList() packages = set(get_adapter_package_names(self.metadata.adapter_type)) for", "node_package, package ) for pkg in candidates: result = self.doc_lookup.find(name,", "def deepcopy(self): return Manifest( nodes={k: _deepcopy(v) for k, v in", ") -> List[Optional[str]]: if target_package is not None: return [target_package]", "dbt.exceptions.RuntimeException( f'cannot update a {new_item.resource_type} to have a new file", "# These attributes are both positional and by keyword. If", "SourceKey, ArtifactMixin, schema_version ) from dbt.dataclass_schema import dbtClassMixin from dbt.exceptions", "source_name: str, table_name: str, package: Optional[str] = None ) ->", "def analysis_lookup(self) -> AnalysisLookup: if self._analysis_lookup is None: self._analysis_lookup =", "in the project \"{macro.package_name}\". To fix this error, rename or", "self, source_file: SchemaSourceFile, patch: ParsedMacroPatch, ) -> None: # macros", "avoid the lock! # pickle and deepcopy use this. It", "the object and a tuple of arguments # for the", "and a tuple of arguments # for the object, i.e.", "any package is allowed. 
nodetypes should be a container of", "DataClassMessagePackMixin from multiprocessing.synchronize import Lock from typing import ( Dict,", "# This is called by 'parse_patch' in the NodePatchParser def", "to send anonymous usage statistics' )), ) adapter_type: Optional[str] =", "ClassVar[set] = set(NodeType.refable()) # refables are actually unique, so the", "= SourceLookup(self) @property def ref_lookup(self) -> RefableLookup: if self._ref_lookup is", "\"\"\" # These attributes are both positional and by keyword.", "Mapping[UniqueID, ManifestNode] = field( metadata=dict(description=( 'The nodes defined in the", "happen when a node is disabled return # patches can't", "table_name: str, package: Optional[str] = None ) -> Optional[ParsedSourceDefinition]: search_name", "disabled is not None: return Disabled(disabled) return None # Called", "arguments # for the object, i.e. the Manifest. # The", "TypeVar('T', bound=GraphMemberNode) def _update_into(dest: MutableMapping[str, T], new_item: T): \"\"\"Update dest", "Optional[RefableLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda", "Dict[str, Any] = field(default_factory=dict) state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck) # Moved", "DocLookup(self) return self._doc_lookup def rebuild_doc_lookup(self): self._doc_lookup = DocLookup(self) @property def", "manifest): for doc in manifest.docs.values(): self.add_doc(doc) def perform_lookup( self, unique_id:", "model.search_name: return False return self.package is None or self.package ==", "source_file: SchemaSourceFile, patch: ParsedMacroPatch, ) -> None: # macros are", "self.nodes.values(), self.sources.values(), self.exposures.values(), )) forward_edges, backward_edges = build_node_edges(edge_members) self.child_map =", "except the is_enabled checks are unnecessary as docs are always", "= doc source_file.docs.append(doc.unique_id) # end of methods formerly in ParseResult", "locally defined macros come 
first - then imported macros -", "raise NotImplementedError('search_name not implemented') N = TypeVar('N', bound=Searchable) @dataclass class", "Optional[ParsedSourceDefinition]: search_name = f'{source_name}.{table_name}' searcher: NameSearcher = NameSearcher( search_name, package,", "Mapping[PackageName, UniqueID] = storage[key] if package is None: if not", "self.nodes, self.sources, self.macros, self.docs, self.exposures, self.selectors, self.disabled, self.files, self.metadata, self.flat_graph,", "lambda x: None} ) _source_lookup: Optional[SourceLookup] = field( default=None, metadata={'serialize':", "Manifest class declaration, because they are used as # positional", "None} ) _source_lookup: Optional[SourceLookup] = field( default=None, metadata={'serialize': lambda x:", "def find_materialization_macro_by_name( self, project_name: str, materialization_name: str, adapter_type: str )", "detect that the macro exists and emit an error other_path", "= field(default_factory=dict) @dataclass class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): \"\"\"The manifest for", "islice from mashumaro import DataClassMessagePackMixin from multiprocessing.synchronize import Lock from", "D = TypeVar('D') @dataclass class Disabled(Generic[D]): target: D MaybeDocumentation =", "Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {} all_resources =", "= self.files[key].checksum return my_checksum == source_file.checksum def add_source( self, source_file:", "resolve_ref except the is_enabled checks are unnecessary as docs are", "imported packages. - if there is a `generate_{component}_name` macro in", "1 Imported = 2 Root = 3 class Specificity(enum.IntEnum): Default", "def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None): self.add_node_nofile(node) if isinstance(source_file,", "and ' '{}). 
dbt cannot resolve this ambiguity' .format(self.macro.name, self.macro.package_name,", "same algorithm as resolve_ref except the is_enabled checks are unnecessary", "overwrite, and they two nodes must have the same original", "it. Make sure you don't call this until you're done", "nodes must have the same original file path. \"\"\" unique_id", "MacroManifest class MacroMethods: # Just to make mypy happy. There", ") _ref_lookup: Optional[RefableLookup] = field( default=None, metadata={'serialize': lambda x: None,", "in manifest.sources: raise dbt.exceptions.InternalException( f'Source {unique_id} found in cache but", "edges on the given list of ParsedNodes and return them", "rename or remove one of the following macros: - {macro.original_file_path}", "== 'analyses': unique_id = self.analysis_lookup.get_unique_id(patch.name, None) else: raise dbt.exceptions.InternalException( f'Unexpected", "ignore source_file.sources.append(source.unique_id) def add_node_nofile(self, node: ManifestNodes): # nodes can't be", "if patch.yaml_key in ['models', 'seeds', 'snapshots']: unique_id = self.ref_lookup.get_unique_id(patch.name, None)", "field( default=None, metadata=dict(description=( 'Whether dbt is configured to send anonymous", "= TypeVar('N', bound=Searchable) @dataclass class NameSearcher(Generic[N]): name: str package: Optional[str]", "self.sources.values()) if not resource_types or node.resource_type in resource_types }) def", "not found in manifest'.format(unique_id) ) @property def doc_lookup(self) -> DocLookup:", "metadata=dict(description=( 'The sources defined in the dbt project and its", "unique_id = self.ref_lookup.get_unique_id(patch.name, None) elif patch.yaml_key == 'analyses': unique_id =", "name, package, NodeType.refable() ) result = searcher.search(self.disabled) return result def", "unique_id = self.analysis_lookup.get_unique_id(patch.name, None) else: raise dbt.exceptions.InternalException( f'Unexpected yaml_key {patch.yaml_key}", "Optional[str] = field( 
default=None, metadata={ 'description': 'A unique identifier for", "nodes: Mapping[UniqueID, ManifestNode] = field( metadata=dict(description=( 'The nodes defined in", "self.nodes[unique_id] = node.replace(deferred=True) # log up to 5 items sample", "dct.items()} def build_node_edges(nodes: List[ManifestNode]): \"\"\"Build the forward and backward edges", "return candidate.locality != Locality.Imported candidates: CandidateList = self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name,", "state (sample: {sample})' ) # Methods that were formerly in", "def build_macro_edges(nodes: List[Any]): forward_edges: Dict[str, List[str]] = { n.unique_id: []", "the project \"{macro.package_name}\". To fix this error, rename or remove", "self.files.items()}, state_check=_deepcopy(self.state_check), ) def build_parent_and_child_maps(self): edge_members = list(chain( self.nodes.values(), self.sources.values(),", "return self._doc_lookup def rebuild_doc_lookup(self): self._doc_lookup = DocLookup(self) @property def source_lookup(self)", "self: return None self.sort() return self[-1].macro def _get_locality( macro: ParsedMacro,", "cast, AbstractSet, ClassVar ) from typing_extensions import Protocol from uuid", "manifest', )) def _check_duplicates( value: HasUniqueID, src: Mapping[str, HasUniqueID] ):", "self, unique_id: UniqueID, manifest ) -> ParsedDocumentation: if unique_id not", "assert isinstance(result, ParsedSourceDefinition) return result def _materialization_candidates_for( self, project_name: str,", "@dataclass class Disabled(Generic[D]): target: D MaybeDocumentation = Optional[ParsedDocumentation] MaybeParsedSource =", "= str DocName = str RefName = str UniqueID =", "This is returned by the 'graph' context property # in", "SourceFile, name: str ) -> V_T: if key not in", "object source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict) # following is from", "perform_lookup( self, unique_id: UniqueID, 
manifest ) -> ParsedDocumentation: if unique_id", "return None class DocLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[str,", "MacroManifest] @dataclass @schema_version('manifest', 2) class WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID, ManifestNode] =", "= field(default_factory=dict) state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck) # Moved from the", "Set[str] ) -> Locality: if macro.package_name == root_project_name: return Locality.Root", "None, 'deserialize': lambda x: None} ) _lock: Lock = field(", "in ParseResult def add_macro(self, source_file: SourceFile, macro: ParsedMacro): if macro.unique_id", "self._doc_lookup def rebuild_doc_lookup(self): self._doc_lookup = DocLookup(self) @property def source_lookup(self) ->", "__init__(self, manifest: 'Manifest'): self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {}", "src: raise_duplicate_resource_name(value, src[value.unique_id]) K_T = TypeVar('K_T') V_T = TypeVar('V_T') def", "macros - then macros defined in the root project \"\"\"", "NodeEdgeMap = Dict[str, List[str]] PackageName = str DocName = str", "# create the initial version of the object and a", "return None # Called by dbt.parser.manifest._resolve_sources_for_exposure # and dbt.parser.manifest._process_source_for_node def", "positional and by keyword. 
If an attribute # is added", "key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str )", "Disabled(disabled) return None # Called by dbt.parser.manifest._resolve_sources_for_exposure # and dbt.parser.manifest._process_source_for_node", "a node at runtime, because multiple threads could be just-in-time", "node.patch_path: package_name, existing_file_path = node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch)", "and not adapter.get_relation( current.database, current.schema, current.identifier ) ): merged.add(unique_id) self.nodes[unique_id]", "MacroCandidate) -> bool: return package == candidate.macro.package_name candidates: CandidateList =", "not adapter.get_relation( current.database, current.schema, current.identifier ) ): merged.add(unique_id) self.nodes[unique_id] =", "macro in the graph by its name and package name,", "= dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, ) return CandidateList( MaterializationCandidate.from_macro(m, specificity)", "the given list of ParsedNodes and return them as two", "a new file ' f'path!' 
) dest[unique_id] = new_item #", "source_file.docs.append(doc.unique_id) # end of methods formerly in ParseResult # Provide", "metadata=self.metadata, disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map, ) def write(self, path): self.writable_manifest().write(path) #", "is not None: def filter(candidate: MacroCandidate) -> bool: return package", "= Dict[str, List[str]] PackageName = str DocName = str RefName", "'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) def get_unique_id(self,", "import NodeType from dbt.ui import line_wrap_message from dbt import flags", "forward_edges, backward_edges = build_node_edges(edge_members) self.child_map = forward_edges self.parent_map = backward_edges", "any concurrency issues around it. Make sure you don't call", "resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns # This is called by 'parse_patch' in", "a better way. def __init__(self): self.macros = [] self.metadata =", "def rebuild_source_lookup(self): self._source_lookup = SourceLookup(self) @property def ref_lookup(self) -> RefableLookup:", "node_package: str, ) -> MaybeNonSource: node: Optional[ManifestNode] = None disabled:", "in node.depends_on.macros: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges) def", "-> str: raise NotImplementedError('search_name not implemented') N = TypeVar('N', bound=Searchable)", "to regular ones, but ignore imported packages. 
- if there", "self, source_name: str, table_name: str, package: Optional[str] = None )", "= DocLookup(self) return self._doc_lookup def rebuild_doc_lookup(self): self._doc_lookup = DocLookup(self) @property", "SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_exposure(self, source_file:", "def __eq__(self, other: object) -> bool: if not isinstance(other, MaterializationCandidate):", "target' )) parent_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from child", "dbt.task.rpc.sql_commands 'add_new_refs' def deepcopy(self): return Manifest( nodes={k: _deepcopy(v) for k,", "my_checksum = self.files[key].checksum return my_checksum == source_file.checksum def add_source( self,", "parsing and during compilation. \"\"\" # These attributes are both", "their counterpart. Only non-ephemeral refable nodes are examined. \"\"\" refables", "def add_source( self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition ): # sources", "MutableMapping[str, ParsedDocumentation] = field(default_factory=dict) exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict) selectors:", "-> AnalysisLookup: if self._analysis_lookup is None: self._analysis_lookup = AnalysisLookup(self) return", "raise dbt.exceptions.InternalException( f'Source {unique_id} found in cache but not found", "None: return self.perform_lookup(unique_id, manifest) return None def add_node(self, node: ManifestNode):", "Optional[ManifestNode] = None candidates = _search_packages( current_project, node_package, target_model_package )", ") -> Optional[ParsedMacro]: \"\"\"Find a macro in the graph by", "result def find_disabled_source_by_name( self, source_name: str, table_name: str, package: Optional[str]", "- then imported macros - then macros defined in the", "self._ref_lookup = RefableLookup(self) @property def analysis_lookup(self) -> AnalysisLookup: if self._analysis_lookup", 
"with their counterpart. Only non-ephemeral refable nodes are examined. \"\"\"", "if self.locality > other.locality: return False return False M =", "not found in manifest' ) return manifest.docs[unique_id] class SourceLookup(dbtClassMixin): def", "def add_node_nofile(self, node: ManifestNodes): # nodes can't be overwritten! _check_duplicates(node,", "True if the model matches the given name, package, and", "full_name = dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, ) return CandidateList( MaterializationCandidate.from_macro(m,", "dbt project and its dependencies' )) ) selectors: Mapping[UniqueID, Any]", "return self.sources[unique_id] elif unique_id in self.exposures: return self.exposures[unique_id] else: #", "x: None} ) _source_lookup: Optional[SourceLookup] = field( default=None, metadata={'serialize': lambda", "manifest by replacing any unselected nodes with their counterpart. Only", "-> V_T: if key not in src: raise CompilationException( 'Expected", "if unique_id not in dest: raise dbt.exceptions.RuntimeException( f'got an update_{new_item.resource_type}", "unique_id = f'macro.{patch.package_name}.{patch.name}' macro = self.macros.get(unique_id) if not macro: warn_or_error(", "field(default_factory=dict) sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict) macros: MutableMapping[str, ParsedMacro] =", "( CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name, ) from", "by dbt.parser.manifest._resolve_sources_for_exposure # and dbt.parser.manifest._process_source_for_node def resolve_source( self, target_source_name: str,", "Optional[str] = None ) -> Optional[ParsedSourceDefinition]: search_name = f'{source_name}.{table_name}' searcher:", "raise dbt.exceptions.InternalException( 'Expected node {} not found in 
manifest'.format(unique_id) )", "return True if self.locality > other.locality: return False return False", "'deserialize': lambda x: None} ) _ref_lookup: Optional[RefableLookup] = field( default=None,", "with self._lock: existing = self.nodes[new_node.unique_id] if getattr(existing, 'compiled', False): #", "key = source_file.file_id if key is None: return False if", "return self @classmethod def __post_deserialize__(cls, obj): obj._lock = flags.MP_CONTEXT.Lock() return", "other_path = self.macros[macro.unique_id].original_file_path # subtract 2 for the \"Compilation Error\"", "backward_edges: Dict[str, List[str]] = {} # pre-populate the forward edge", "isinstance(other, MacroCandidate): return NotImplemented return self.locality == other.locality def __lt__(self,", "- {other_path} ''', subtract=2 ) raise_compiler_error(msg) self.macros[macro.unique_id] = macro source_file.macros.append(macro.unique_id)", "field(default_factory=list) files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) metadata: ManifestMetadata = field(default_factory=ManifestMetadata)", "...]]] = {} all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values()) for resource", "if there is a `generate_{component}_name` macro in the root project,", "' f'which was not found' ) return if macro.patch_path: package_name,", "but ignore imported packages. 
- if there is a `generate_{component}_name`", "tracking.active_user is None: return if self.user_id is None: self.user_id =", "macros self.metadata = ManifestMetadata() # This is returned by the", "add_node(self, node: ManifestNode): if node.resource_type in self._lookup_types: if node.name not", "return next(iter(pkg_dct.values())) elif package in pkg_dct: return pkg_dct[package] else: return", "self.user_id is None: self.user_id = tracking.active_user.id if self.send_anonymous_usage_stats is None:", "candidates @dataclass class ParsingInfo: static_analysis_parsed_path_count: int = 0 static_analysis_path_count: int", "in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges) def _deepcopy(value): return value.from_dict(value.to_dict(omit_none=True)) class", "child_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from parent nodes to", "project', }, ) user_id: Optional[UUID] = field( default=None, metadata={ 'description':", "elif unique_id in self.exposures: return self.exposures[unique_id] else: # something terrible", ") return manifest.sources[unique_id] class RefableLookup(dbtClassMixin): # model, seed, snapshot _lookup_types:", "= line_wrap_message( f'''\\ dbt found two macros named \"{macro.name}\" in", "used in dbt.task.rpc.sql_commands 'add_new_refs' def deepcopy(self): return Manifest( nodes={k: _deepcopy(v)", "its dependencies' )) ) selectors: Mapping[UniqueID, Any] = field( metadata=dict(description=(", "} def find_disabled_by_name( self, name: str, package: Optional[str] = None", "resource.resource_type.pluralize() if resource_type_plural not in resource_fqns: resource_fqns[resource_type_plural] = set() resource_fqns[resource_type_plural].add(tuple(resource.fqn))", "the dbt project and its dependencies' )) ) sources: Mapping[UniqueID,", "package) if unique_id is not None: return self.perform_lookup(unique_id, manifest) return", "map of children of macros def 
build_macro_edges(nodes: List[Any]): forward_edges: Dict[str,", "(node.database, node.schema) for node in chain(self.nodes.values(), self.sources.values()) if not resource_types", "are examined. \"\"\" refables = set(NodeType.refable()) merged = set() for", "the is_enabled checks are unnecessary as docs are always enabled.", "import ( BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version ) from dbt.dataclass_schema import", "- we just need to avoid the lock! # pickle", "key, package: Optional[PackageName], manifest: 'Manifest'): unique_id = self.get_unique_id(key, package) if", "= field(default_factory=dict) _doc_lookup: Optional[DocLookup] = field( default=None, metadata={'serialize': lambda x:", "root_project_name: str, package: Optional[str] ) -> Optional[ParsedMacro]: \"\"\"Find a macro", "error, rename or remove one of the following macros: -", "patch.yaml_key in ['models', 'seeds', 'snapshots']: unique_id = self.ref_lookup.get_unique_id(patch.name, None) elif", "materialization_name=materialization_name, adapter_type=atype, ) for atype in (adapter_type, None) )) return", "import dataclass, field from itertools import chain, islice from mashumaro", "Optional[SourceLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda", "node.schema) for node in chain(self.nodes.values(), self.sources.values()) if not resource_types or", "we want them to have a consistent view of the", "ambiguity' .format(self.macro.name, self.macro.package_name, other.macro.package_name) ) return equal def __lt__(self, other:", "self._source_lookup def rebuild_source_lookup(self): self._source_lookup = SourceLookup(self) @property def ref_lookup(self) ->", "in self.exposures.items() }, 'nodes': { k: v.to_dict(omit_none=False) for k, v", ") -> Optional[ParsedDocumentation]: \"\"\"Resolve the given documentation. 
This follows the", "key not in self.files: return False my_checksum = self.files[key].checksum return", "all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values()) for resource in all_resources: resource_type_plural", "False if self.locality < other.locality: return True if self.locality >", "Mapping[UniqueID, ParsedMacro] = field( metadata=dict(description=( 'The macros defined in the", "the right position. nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict) sources: MutableMapping[str,", "!= name: continue candidate = MacroCandidate( locality=_get_locality(macro, root_project_name, packages), macro=macro,", "root_project_name=root_project_name, # filter out imported packages filter=filter, ) return candidates.last()", "root_project_name, packages), macro=macro, ) if filter is None or filter(candidate):", "return self._analysis_lookup # Called by dbt.parser.manifest._resolve_refs_for_exposure # and dbt.parser.manifest._process_refs_for_node def", ") -> ParsedSourceDefinition: if unique_id not in manifest.sources: raise dbt.exceptions.InternalException(", "name=f'generate_{component}_name', root_project_name=root_project_name, # filter out imported packages filter=filter, ) return", "is at dest[new_item.unique_id] with new_itme. 
There must be an existing", "for pkg in candidates: result = self.doc_lookup.find(name, pkg, self) if", "= self.doc_lookup.find(name, pkg, self) if result is not None: return", "node_package, target_model_package ) for pkg in candidates: node = self.ref_lookup.find(target_model_name,", "{ k: v.to_dict(omit_none=False) for k, v in self.nodes.items() }, 'sources':", "if key in self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key] = patch source_file.source_patches.append(key)", "def rebuild_ref_lookup(self): self._ref_lookup = RefableLookup(self) @property def analysis_lookup(self) -> AnalysisLookup:", "NotImplemented if self.specificity < other.specificity: return True if self.specificity >", "the given documentation. This follows the same algorithm as resolve_ref", "are in both the Manifest # and the MacroManifest class", "and its dependencies' )) ) selectors: Mapping[UniqueID, Any] = field(", "-> 'MaterializationCandidate': return cls( locality=candidate.locality, macro=candidate.macro, specificity=specificity, ) def __eq__(self,", "{} not found in manifest'.format(unique_id) ) @property def doc_lookup(self) ->", "dbt.parser.manifest._resolve_refs_for_exposure # and dbt.parser.manifest._process_refs_for_node def resolve_ref( self, target_model_name: str, target_model_package:", "the same algorithm as resolve_ref except the is_enabled checks are", "manifest ) -> ParsedDocumentation: if unique_id not in manifest.docs: raise", "other.macro.package_name) ) return equal def __lt__(self, other: object) -> bool:", "k, v in self.nodes.items() }, 'sources': { k: v.to_dict(omit_none=False) for", "Disabled(Generic[D]): target: D MaybeDocumentation = Optional[ParsedDocumentation] MaybeParsedSource = Optional[Union[ ParsedSourceDefinition,", "str UniqueID = str def find_unique_id_for_package(storage, key, package: Optional[PackageName]): if", "the same ephemeral dependency, and we want them to have", "if 
self._matches(model): return model return None D = TypeVar('D') @dataclass", "= self.find_disabled_source_by_name( target_source_name, target_table_name, pkg ) if disabled is not", "from child nodes to their dependencies', )) child_map: Optional[NodeEdgeMap] =", "# refables are actually unique, so the Dict[PackageName, UniqueID] will", "other.locality: return True if self.locality > other.locality: return False return", "node with a lock. The only time we should want", "= _search_packages(current_project, node_package) source: Optional[ParsedSourceDefinition] = None disabled: Optional[ParsedSourceDefinition] =", "tests. \"\"\" return {k: sorted(v) for k, v in dct.items()}", "macro methods that are in both the Manifest # and", "and emit an error other_path = self.macros[macro.unique_id].original_file_path # subtract 2", "v in self.nodes.items()}, sources={k: _deepcopy(v) for k, v in self.sources.items()},", "v in self.exposures.items()}, selectors={k: _deepcopy(v) for k, v in self.selectors.items()},", "a NonSourceCompiledNode return cast(NonSourceCompiledNode, existing) _update_into(self.nodes, new_node) return new_node def", "self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n) for n in self.disabled], files={k: _deepcopy(v) for", "refables and not node.is_ephemeral and unique_id not in selected and", "ParsedSourceDefinition) return result def _materialization_candidates_for( self, project_name: str, materialization_name: str,", "for unique_id, macro in self.macros.items(): if macro.name != name: continue", "macro.unique_id in self.macros: # detect that the macro exists and", "AbstractSet[UniqueID], ) -> None: \"\"\"Given the selected unique IDs and", "statistics' )), ) adapter_type: Optional[str] = field( default=None, metadata=dict(description='The type", "is added it must all be added in the __reduce_ex__", "exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict) selectors: MutableMapping[str, Any] = 
field(default_factory=dict)", "{macro.original_file_path} - {other_path} ''', subtract=2 ) raise_compiler_error(msg) self.macros[macro.unique_id] = macro", "= field(metadata=dict( description='A list of the disabled nodes in the", ")) def _check_duplicates( value: HasUniqueID, src: Mapping[str, HasUniqueID] ): if", "= None, ) -> List[Optional[str]]: if target_package is not None:", "target: D MaybeDocumentation = Optional[ParsedDocumentation] MaybeParsedSource = Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition],", "_update_into(self.sources, new_source) def build_flat_graph(self): \"\"\"This attribute is used in context.common", "Optional[PackageName], manifest: 'Manifest'): unique_id = self.get_unique_id(key, package) if unique_id is", "once and avoid any concurrency issues around it. Make sure", "equal def __lt__(self, other: object) -> bool: if not isinstance(other,", "is_enabled checks are unnecessary as docs are always enabled. \"\"\"", "current_project: str, node_package: str, ) -> Optional[ParsedDocumentation]: \"\"\"Resolve the given", "line_wrap_message( f'''\\ dbt found two macros named \"{macro.name}\" in the", "= NameSearcher( search_name, package, [NodeType.Source] ) result = searcher.search(self.disabled) if", "in the ProviderContext class. self.flat_graph = {} AnyManifest = Union[Manifest,", "# is not a big deal at all and retains", "if current and ( node.resource_type in refables and not node.is_ephemeral", "the same original file path. 
\"\"\" unique_id = new_item.unique_id if", "= [] self.metadata = {} def find_macro_by_name( self, name: str,", "returns a callable object used to # create the initial", "is None: self._analysis_lookup = AnalysisLookup(self) return self._analysis_lookup # Called by", "found in cache but not found in manifest' ) return", "for the manifest.\"\"\" dbt_schema_version: str = field( default_factory=lambda: str(WritableManifest.dbt_schema_version) )", ") from dbt.contracts.graph.parsed import ( ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition,", "IDs to lists of edges. \"\"\" backward_edges: Dict[str, List[str]] =", "CompileResultNode, test_from=None): self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from)", "and its dependencies' )) ) macros: Mapping[UniqueID, ParsedMacro] = field(", "existing node. \"\"\" with self._lock: existing = self.nodes[new_node.unique_id] if getattr(existing,", "\"\"\" self.flat_graph = { 'exposures': { k: v.to_dict(omit_none=False) for k,", "node is disabled if disabled is None: disabled = self.find_disabled_by_name(", "pkg ) if disabled is not None: return Disabled(disabled) return", "None) else: raise dbt.exceptions.InternalException( f'Unexpected yaml_key {patch.yaml_key} for patch in", "def _expect_value( key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name:", "k: v.to_dict(omit_none=False) for k, v in self.nodes.items() }, 'sources': {", "def get_used_schemas(self, resource_types=None): return frozenset({ (node.database, node.schema) for node in", "Optional[ManifestNode] = None disabled: Optional[ManifestNode] = None candidates = _search_packages(", "'Expected node {} not found in manifest'.format(unique_id) ) @property def", "field(metadata=dict( description='Metadata about the manifest', )) def _check_duplicates( value: HasUniqueID,", "str, materialization_name: str, adapter_type: Optional[str], ) -> 
CandidateList: if adapter_type", "lambda x: None} ) _lock: Lock = field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize':", "for k, v in self.nodes.items()}, sources={k: _deepcopy(v) for k, v", "be added in the __reduce_ex__ method in the # args", "CandidateList = self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name, # filter out imported packages", "Optional[ParsedSourceDefinition] = None disabled: Optional[ParsedSourceDefinition] = None for pkg in", "None} ) def __pre_serialize__(self): # serialization won't work with anything", "candidates: node = self.ref_lookup.find(target_model_name, pkg, self) if node is not", "return candidates.last() def _find_macros_by_name( self, name: str, root_project_name: str, filter:", "for tests. \"\"\" return {k: sorted(v) for k, v in", "target_table_name, pkg ) if disabled is not None: return Disabled(disabled)", "ParsedSourceDefinition: if unique_id not in manifest.sources: raise dbt.exceptions.InternalException( f'Source {unique_id}", "_update_into(self.nodes, new_node) def update_source(self, new_source: ParsedSourceDefinition): _update_into(self.sources, new_source) def build_flat_graph(self):", "else: source_file.nodes.append(node.unique_id) def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation): _check_duplicates(doc, self.docs)", "3 dict lookups instead of 1 # is not a", "rebuild_source_lookup(self): self._source_lookup = SourceLookup(self) @property def ref_lookup(self) -> RefableLookup: if", "self.nodes.values(), self.macros.values(), )) forward_edges = build_macro_edges(edge_members) return forward_edges def writable_manifest(self):", "self._lookup_types: if node.name not in self.storage: self.storage[node.name] = {} self.storage[node.name][node.package_name]", ") -> V_T: if key not in src: raise CompilationException(", "def populate(self, manifest): for source in manifest.sources.values(): if hasattr(source, 'source_name'):", "\"\"\"Find macros by 
their name. \"\"\" # avoid an import", "ManifestNode] = field( metadata=dict(description=( 'The nodes defined in the dbt", "NameSearcher( search_name, package, [NodeType.Source] ) result = searcher.search(self.disabled) if result", "Called in dbt.compilation.Linker.write_graph and # dbt.graph.queue.get and ._include_in_cost def expect(self,", "MacroCandidate, specificity: Specificity ) -> 'MaterializationCandidate': return cls( locality=candidate.locality, macro=candidate.macro,", "macro=macro, ) if filter is None or filter(candidate): candidates.append(candidate) return", "x: None, 'deserialize': lambda x: None} ) def __pre_serialize__(self): #", "def get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]] =", "any unselected nodes with their counterpart. Only non-ephemeral refable nodes", "List[str]] = {n.unique_id: [] for n in nodes} for node", "self.docs.items()}, exposures={k: _deepcopy(v) for k, v in self.exposures.items()}, selectors={k: _deepcopy(v)", ")) ) exposures: Mapping[UniqueID, ParsedExposure] = field( metadata=dict(description=( 'The exposures", "= doc.unique_id def populate(self, manifest): for doc in manifest.docs.values(): self.add_doc(doc)", "docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict) exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)", "import dbtClassMixin from dbt.exceptions import ( CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error,", "{patch.yaml_key} for patch in ' f'file {source_file.path.original_file_path}' ) if unique_id", "unselected nodes with their counterpart. 
Only non-ephemeral refable nodes are", "not isinstance(other, MaterializationCandidate): return NotImplemented equal = ( self.specificity ==", "ParseResult _disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict) _doc_lookup: Optional[DocLookup] = field(", "= field( default_factory=ParsingInfo, metadata={'serialize': lambda x: None, 'deserialize': lambda x:", "mapping from parent nodes to their dependents', )) metadata: ManifestMetadata", "in self.storage: self.storage[key] = {} self.storage[key][source.package_name] = source.unique_id def populate(self,", "f'''\\ dbt found two macros named \"{macro.name}\" in the project", "of the attributes # in the Manifest class declaration, because", "if not isinstance(other, MacroCandidate): return NotImplemented if self.locality < other.locality:", "= AnalysisLookup(self) return self._analysis_lookup # Called by dbt.parser.manifest._resolve_refs_for_exposure # and", "self._disabled[node.unique_id] = [node] def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None):", "whatever is at dest[new_item.unique_id] with new_itme. There must be an", "field(default_factory=ManifestMetadata) flat_graph: Dict[str, Any] = field(default_factory=dict) state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck)", "field( metadata=dict(description=( 'The exposures defined in the dbt project and", "overwritten! 
_check_duplicates(source, self.sources) self.sources[source.unique_id] = source # type: ignore source_file.sources.append(source.unique_id)", "Locality macro: ParsedMacro def __eq__(self, other: object) -> bool: if", "from mashumaro import DataClassMessagePackMixin from multiprocessing.synchronize import Lock from typing", "SourceLookup(self) @property def ref_lookup(self) -> RefableLookup: if self._ref_lookup is None:", "node.resource_type in resource_types }) def get_used_databases(self): return frozenset( x.database for", "in the target' )) parent_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping", "indent # note that the line wrap eats newlines, so", "WritableManifest( nodes=self.nodes, sources=self.sources, macros=self.macros, docs=self.docs, exposures=self.exposures, selectors=self.selectors, metadata=self.metadata, disabled=self.disabled, child_map=self.child_map,", "str = field( default_factory=lambda: str(WritableManifest.dbt_schema_version) ) project_id: Optional[str] = field(", "current_project, node_package, package ) for pkg in candidates: result =", "# positional arguments to construct a Manifest. def __reduce_ex__(self, protocol):", ")) return candidates.last() def get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns: Dict[str,", "doc in manifest.docs.values(): self.add_doc(doc) def perform_lookup( self, unique_id: UniqueID, manifest", "str DocName = str RefName = str UniqueID = str", "not None: return self.perform_lookup(unique_id, manifest) return None def add_doc(self, doc:", "class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): \"\"\"The manifest for the full graph,", "Only non-ephemeral refable nodes are examined. 
\"\"\" refables = set(NodeType.refable())", "> other.locality: return False return False M = TypeVar('M', bound=MacroCandidate)", "class Specificity(enum.IntEnum): Default = 1 Adapter = 2 @dataclass class", "in internal_packages: return Locality.Core else: return Locality.Imported class Searchable(Protocol): resource_type:", "self.sources.values()) ) # This is used in dbt.task.rpc.sql_commands 'add_new_refs' def", "package in pkg_dct: return pkg_dct[package] else: return None class DocLookup(dbtClassMixin):", "selectors={k: _deepcopy(v) for k, v in self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n) for", ") def __post_init__(self): if tracking.active_user is None: return if self.user_id", "There must be a better way. def __init__(self): self.macros =", "self.sort() return self[-1].macro def _get_locality( macro: ParsedMacro, root_project_name: str, internal_packages:", "_search_packages( current_project: str, node_package: str, target_package: Optional[str] = None, )", "attributes # in the Manifest class declaration, because they are", "GraphMemberNode ) from dbt.contracts.graph.parsed import ( ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch,", "from parent nodes to their dependents', )) metadata: ManifestMetadata =", "warn_or_error( f'WARNING: Found documentation for macro \"{patch.name}\" ' f'which was", "project and its dependencies' )) ) exposures: Mapping[UniqueID, ParsedExposure] =", "or remove one of the following macros: - {macro.original_file_path} -", "ancestor of a node at runtime, because multiple threads could", "self.state_check, self.source_patches, self._disabled, self._doc_lookup, self._source_lookup, self._ref_lookup, ) return self.__class__, args", "dbt is configured to send anonymous usage statistics' )), )", "dbt import flags from dbt import tracking import dbt.utils NodeEdgeMap", "str, root_project_name: str ) -> Optional[ParsedMacro]: \"\"\" The `generate_X_name` macros", "= {} 
self.storage[key][source.package_name] = source.unique_id def populate(self, manifest): for source", "self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key] = patch source_file.source_patches.append(key) def get_used_schemas(self, resource_types=None):", "chain(self.nodes.values(), self.sources.values()) if not resource_types or node.resource_type in resource_types })", "else: source_file.nodes.append(node.unique_id) def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure): _check_duplicates(exposure, self.exposures)", "get_adapter_package_names candidates: CandidateList = CandidateList() packages = set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id,", "Optional[ManifestNode]: searcher: NameSearcher = NameSearcher( name, package, NodeType.refable() ) result", "for the user', }, ) send_anonymous_usage_stats: Optional[bool] = field( default=None,", "source_file.macros.append(macro.unique_id) def has_file(self, source_file: SourceFile) -> bool: key = source_file.file_id", "= str RefName = str UniqueID = str def find_unique_id_for_package(storage,", "Dict[PackageName, UniqueID]] = {} self.populate(manifest) def get_unique_id(self, key, package: Optional[PackageName]):", "cache but not found in manifest' ) return manifest.docs[unique_id] class", "if package is not None: def filter(candidate: MacroCandidate) -> bool:", "raise dbt.exceptions.InternalException( f'Doc {unique_id} found in cache but not found", "M = TypeVar('M', bound=MacroCandidate) class CandidateList(List[M]): def last(self) -> Optional[ParsedMacro]:", "_matches(self, model: N) -> bool: \"\"\"Return True if the model", "in self.nodes: return self.nodes[unique_id] elif unique_id in self.sources: return self.sources[unique_id]", "ParsingInfo: static_analysis_parsed_path_count: int = 0 static_analysis_path_count: int = 0 @dataclass", ") -> CandidateList: \"\"\"Find macros by their name. 
\"\"\" #", "' f'file {source_file.path.original_file_path}' ) if unique_id is None: # This", "source is not None and source.config.enabled: return source if disabled", "V_T = TypeVar('V_T') def _expect_value( key: K_T, src: Mapping[K_T, V_T],", "def _search_packages( current_project: str, node_package: str, target_package: Optional[str] = None,", "return Locality.Core else: return Locality.Imported class Searchable(Protocol): resource_type: NodeType package_name:", "from dbt.adapters.factory import get_adapter_package_names candidates: CandidateList = CandidateList() packages =", "from dbt import tracking import dbt.utils NodeEdgeMap = Dict[str, List[str]]", "not in manifest.sources: raise dbt.exceptions.InternalException( f'Source {unique_id} found in cache", "user', }, ) send_anonymous_usage_stats: Optional[bool] = field( default=None, metadata=dict(description=( 'Whether", "`generate_X_name` macros are similar to regular ones, but ignore imported", "AnyManifest = Union[Manifest, MacroManifest] @dataclass @schema_version('manifest', 2) class WritableManifest(ArtifactMixin): nodes:", "from dbt import flags from dbt import tracking import dbt.utils", "MaterializationCandidate(MacroCandidate): specificity: Specificity @classmethod def from_macro( cls, candidate: MacroCandidate, specificity:", "return obj def sync_update_node( self, new_node: NonSourceCompiledNode ) -> NonSourceCompiledNode:", "key = (target_source_name, target_table_name) candidates = _search_packages(current_project, node_package) source: Optional[ParsedSourceDefinition]", "return None else: return next(iter(pkg_dct.values())) elif package in pkg_dct: return", "package_name: str @property def search_name(self) -> str: raise NotImplementedError('search_name not", "str, package: Optional[str] = None ) -> Optional[ManifestNode]: searcher: NameSearcher", "None} ) _analysis_lookup: Optional[AnalysisLookup] = field( default=None, metadata={'serialize': lambda x:", "in node.depends_on_nodes: if unique_id 
in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges), _sort_values(backward_edges)", "= field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda x: None, 'deserialize': lambda x:", "if not isinstance(other, MacroCandidate): return NotImplemented return self.locality == other.locality", "= Union[Manifest, MacroManifest] @dataclass @schema_version('manifest', 2) class WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID,", "None # Called by RunTask.defer_to_manifest def merge_from_artifact( self, adapter, other:", "bool: \"\"\"Return True if the model matches the given name,", "if key not in src: raise CompilationException( 'Expected to find", "deepcopy use this. It returns a callable object used to", "backward_edges = build_node_edges(edge_members) self.child_map = forward_edges self.parent_map = backward_edges def", "ParsedDocumentation): if doc.name not in self.storage: self.storage[doc.name] = {} self.storage[doc.name][doc.package_name]", "}) def get_used_databases(self): return frozenset( x.database for x in chain(self.nodes.values(),", "nodes can't be overwritten! _check_duplicates(node, self.nodes) self.nodes[node.unique_id] = node def", "new_item.unique_id if unique_id not in dest: raise dbt.exceptions.RuntimeException( f'got an", "node is compiled, do not update the manifest and return", "macros named \"{macro.name}\" in the project \"{macro.package_name}\". 
To fix this", "= field( metadata=dict(description=( 'The exposures defined in the dbt project", "List[Any]): forward_edges: Dict[str, List[str]] = { n.unique_id: [] for n", "def has_file(self, source_file: SourceFile) -> bool: key = source_file.file_id if", "self._doc_lookup = DocLookup(self) @property def source_lookup(self) -> SourceLookup: if self._source_lookup", "3 class Specificity(enum.IntEnum): Default = 1 Adapter = 2 @dataclass", "str, package: Optional[str] ) -> Optional[ParsedMacro]: \"\"\"Find a macro in", "packages. - if there is a `generate_{component}_name` macro in the", "so we want to only build it once and avoid", "edge_members = list(chain( self.nodes.values(), self.macros.values(), )) forward_edges = build_macro_edges(edge_members) return", "filter: Optional[Callable[[MacroCandidate], bool]] = None if package is not None:", "MacroCandidate): return NotImplemented return self.locality == other.locality def __lt__(self, other:", "actually unique, so the Dict[PackageName, UniqueID] will # only ever", "'seeds', 'snapshots']: unique_id = self.ref_lookup.get_unique_id(patch.name, None) elif patch.yaml_key == 'analyses':", "an import cycle from dbt.adapters.factory import get_adapter_package_names candidates: CandidateList =", "for node in nodes: backward_edges[node.unique_id] = node.depends_on_nodes[:] for unique_id in", "return WritableManifest( nodes=self.nodes, sources=self.sources, macros=self.macros, docs=self.docs, exposures=self.exposures, selectors=self.selectors, metadata=self.metadata, disabled=self.disabled,", "exists and emit an error other_path = self.macros[macro.unique_id].original_file_path # subtract", "operator. 
\"\"\" if model.resource_type not in self.nodetypes: return False if", "path): self.writable_manifest().write(path) # Called in dbt.compilation.Linker.write_graph and # dbt.graph.queue.get and", "result :( msg = line_wrap_message( f'''\\ dbt found two macros", "Locality.Imported candidates: CandidateList = self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name, # filter out", "disabled: Optional[ManifestNode] = None candidates = _search_packages( current_project, node_package, target_model_package", "}, 'nodes': { k: v.to_dict(omit_none=False) for k, v in self.nodes.items()", "return self.__class__, args class MacroManifest(MacroMethods): def __init__(self, macros): self.macros =", "-> MaybeNonSource: node: Optional[ManifestNode] = None disabled: Optional[ManifestNode] = None", "NameSearcher = NameSearcher( search_name, package, [NodeType.Source] ) result = searcher.search(self.disabled)", "from ParseResult _disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict) _doc_lookup: Optional[DocLookup] =", "in chain(self.nodes.values(), self.sources.values()) ) # This is used in dbt.task.rpc.sql_commands", "CompileResultNode): if node.unique_id in self._disabled: self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id] = [node]", "root_project_name: str, filter: Optional[Callable[[MacroCandidate], bool]] = None ) -> CandidateList:", "str, root_project_name: str, package: Optional[str] ) -> Optional[ParsedMacro]: \"\"\"Find a", ") -> None: # macros are fully namespaced unique_id =", "str, package: Optional[str], current_project: str, node_package: str, ) -> Optional[ParsedDocumentation]:", "be overwritten! 
_check_duplicates(node, self.nodes) self.nodes[node.unique_id] = node def add_node(self, source_file:", "@dataclass class ManifestStateCheck(dbtClassMixin): vars_hash: FileHash = field(default_factory=FileHash.empty) profile_hash: FileHash =", "candidate.locality != Locality.Imported candidates: CandidateList = self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name, #", "): merged.add(unique_id) self.nodes[unique_id] = node.replace(deferred=True) # log up to 5", "and ._include_in_cost def expect(self, unique_id: str) -> GraphMemberNode: if unique_id", "in selected and not adapter.get_relation( current.database, current.schema, current.identifier ) ):", "name: continue candidate = MacroCandidate( locality=_get_locality(macro, root_project_name, packages), macro=macro, )", "sort each value. This makes output deterministic, which helps for", "them to have a consistent view of the manifest. If", "None: self._doc_lookup = DocLookup(self) return self._doc_lookup def rebuild_doc_lookup(self): self._doc_lookup =", "ParsedMacro def __eq__(self, other: object) -> bool: if not isinstance(other,", "dataclasses import dataclass, field from itertools import chain, islice from", "for resource in all_resources: resource_type_plural = resource.resource_type.pluralize() if resource_type_plural not", "self.nodes.get(unique_id) if current and ( node.resource_type in refables and not", ") user_id: Optional[UUID] = field( default=None, metadata={ 'description': 'A unique", "dbt project and its dependencies' )) ) exposures: Mapping[UniqueID, ParsedExposure]", "{new_item.unique_id}' ) existing = dest[unique_id] if new_item.original_file_path != existing.original_file_path: raise", "new_node: NonSourceCompiledNode ) -> NonSourceCompiledNode: \"\"\"update the node with a", "{} self.storage[node.name][node.package_name] = node.unique_id def populate(self, manifest): for node in", "materialization_name: str, adapter_type: str ) -> 
Optional[ParsedMacro]: candidates: CandidateList =", "existing = dest[unique_id] if new_item.original_file_path != existing.original_file_path: raise dbt.exceptions.RuntimeException( f'cannot", "None: disabled = self.find_disabled_by_name( target_model_name, pkg ) if disabled is", "n in nodes} for node in nodes: backward_edges[node.unique_id] = node.depends_on_nodes[:]", "is None: return if self.user_id is None: self.user_id = tracking.active_user.id", "node_package: return [current_project, None] else: return [current_project, node_package, None] @dataclass", "in self.sources: return self.sources[unique_id] elif unique_id in self.exposures: return self.exposures[unique_id]", "None for pkg in candidates: source = self.source_lookup.find(key, pkg, self)", "The only time we should want to lock is when", "add_macro(self, source_file: SourceFile, macro: ParsedMacro): if macro.unique_id in self.macros: #", "return Locality.Imported class Searchable(Protocol): resource_type: NodeType package_name: str @property def", "# model, seed, snapshot _lookup_types: ClassVar[set] = set(NodeType.refable()) # refables", "HasUniqueID, UnpatchedSourceDefinition, ManifestNodes ) from dbt.contracts.graph.unparsed import SourcePatch from dbt.contracts.files", "DocLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]] =", "self.files: return False my_checksum = self.files[key].checksum return my_checksum == source_file.checksum", "= self.ref_lookup.find(target_model_name, pkg, self) if node is not None and", "MacroCandidate): return NotImplemented if self.locality < other.locality: return True if", "not isinstance(other, MacroCandidate): return NotImplemented return self.locality == other.locality def", "resource_types=None): return frozenset({ (node.database, node.schema) for node in chain(self.nodes.values(), self.sources.values())", "state_check=_deepcopy(self.state_check), ) def build_parent_and_child_maps(self): 
edge_members = list(chain( self.nodes.values(), self.sources.values(), self.exposures.values(),", "from state (sample: {sample})' ) # Methods that were formerly", "_check_duplicates(node, self.nodes) self.nodes[node.unique_id] = node def add_node(self, source_file: AnySourceFile, node:", "field(default_factory=dict) macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict) docs: MutableMapping[str, ParsedDocumentation] =", "node: CompileResultNode): if node.unique_id in self._disabled: self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id] =", "= field( default=None, metadata=dict(description='The type name of the adapter'), )", "new_node) def update_source(self, new_source: ParsedSourceDefinition): _update_into(self.sources, new_source) def build_flat_graph(self): \"\"\"This", "when compiling an ephemeral ancestor of a node at runtime,", "self._doc_lookup = DocLookup(self) return self._doc_lookup def rebuild_doc_lookup(self): self._doc_lookup = DocLookup(self)", "NotImplementedError('search_name not implemented') N = TypeVar('N', bound=Searchable) @dataclass class NameSearcher(Generic[N]):", "NonSourceCompiledNode ) -> NonSourceCompiledNode: \"\"\"update the node with a lock.", "= field( metadata=dict(description=( 'The selectors defined in selectors.yml' )) )", "self.add_node(node) def perform_lookup( self, unique_id: UniqueID, manifest ) -> ManifestNode:", "AbstractSet, ClassVar ) from typing_extensions import Protocol from uuid import", "Adapter = 2 @dataclass class MacroCandidate: locality: Locality macro: ParsedMacro", "= Specificity.Adapter full_name = dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, ) return", "k, v in self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n) for n in self.disabled],", "unique_id in node.depends_on.macros: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) 
return _sort_values(forward_edges)", "unique_id in node.depends_on_nodes: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges),", "disabled = self.find_disabled_by_name( target_model_name, pkg ) if disabled is not", "v in self.docs.items()}, exposures={k: _deepcopy(v) for k, v in self.exposures.items()},", "elif macro.package_name in internal_packages: return Locality.Core else: return Locality.Imported class", "and dbt.parser.manifest._process_source_for_node def resolve_source( self, target_source_name: str, target_table_name: str, current_project:", "resource_types }) def get_used_databases(self): return frozenset( x.database for x in", "a consistent view of the manifest. If the existing node", "nodes to their dependencies', )) child_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A", "HasUniqueID] ): if value.unique_id in src: raise_duplicate_resource_name(value, src[value.unique_id]) K_T =", "str(WritableManifest.dbt_schema_version) ) project_id: Optional[str] = field( default=None, metadata={ 'description': 'A", "If package is None, any package is allowed. 
nodetypes should", "Imported = 2 Root = 3 class Specificity(enum.IntEnum): Default =", "def __post_deserialize__(cls, obj): obj._lock = flags.MP_CONTEXT.Lock() return obj def sync_update_node(", "cast(NonSourceCompiledNode, existing) _update_into(self.nodes, new_node) return new_node def update_exposure(self, new_exposure: ParsedExposure):", "self.user_id = tracking.active_user.id if self.send_anonymous_usage_stats is None: self.send_anonymous_usage_stats = (", "# something terrible has happened raise dbt.exceptions.InternalException( 'Expected node {}", "= None ) -> Optional[ManifestNode]: searcher: NameSearcher = NameSearcher( name,", "specificity = Specificity.Default else: specificity = Specificity.Adapter full_name = dbt.utils.get_materialization_macro_name(", "{source_file.path.original_file_path}' ) if unique_id is None: # This will usually", "for k, v in self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n) for n in", "str ) -> Optional[ParsedMacro]: \"\"\" The `generate_X_name` macros are similar", "self.macro.package_name, other.macro.package_name) ) return equal def __lt__(self, other: object) ->", "NotImplemented equal = ( self.specificity == other.specificity and self.locality ==", "MacroCandidate( locality=_get_locality(macro, root_project_name, packages), macro=macro, ) if filter is None", "macro in the root project, return it - return the", "ManifestNode] = field(default_factory=dict) sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict) macros: MutableMapping[str,", ") for pkg in candidates: result = self.doc_lookup.find(name, pkg, self)", "a dictionary, sort each value. 
This makes output deterministic, which", "emit an error other_path = self.macros[macro.unique_id].original_file_path # subtract 2 for", "chain(self.exposures.values(), self.nodes.values(), self.sources.values()) for resource in all_resources: resource_type_plural = resource.resource_type.pluralize()", "_check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self, node: CompileResultNode):", "in the dbt project and its dependencies' )) ) docs:", "adapter_type=adapter_type, with_prefix=False, ) return CandidateList( MaterializationCandidate.from_macro(m, specificity) for m in", "with the new node and return that. If the existing", "adapter_type is None: specificity = Specificity.Default else: specificity = Specificity.Adapter", "and type. If package is None, any package is allowed.", "dest: raise dbt.exceptions.RuntimeException( f'got an update_{new_item.resource_type} call with an '", "-> Optional[ParsedDocumentation]: \"\"\"Resolve the given documentation. 
This follows the same", ") def find_materialization_macro_by_name( self, project_name: str, materialization_name: str, adapter_type: str", "f'which was not found' ) return if macro.patch_path: package_name, existing_file_path", "= flags.MP_CONTEXT.Lock() return obj def sync_update_node( self, new_node: NonSourceCompiledNode )", "lambda x: None} ) _ref_lookup: Optional[RefableLookup] = field( default=None, metadata={'serialize':", "self.send_anonymous_usage_stats is None: self.send_anonymous_usage_stats = ( not tracking.active_user.do_not_track ) @classmethod", "two materializations with the name {} (packages {} and '", "def __reduce_ex__(self, protocol): args = ( self.nodes, self.sources, self.macros, self.docs,", "Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast,", "name of the adapter'), ) def __post_init__(self): if tracking.active_user is", "candidates: CandidateList = CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name, adapter_type=atype, ) for", "not None: def filter(candidate: MacroCandidate) -> bool: return package ==", "-> bool: return package == candidate.macro.package_name candidates: CandidateList = self._find_macros_by_name(", ") _parsing_info: ParsingInfo = field( default_factory=ParsingInfo, metadata={'serialize': lambda x: None,", "unique_id: UniqueID, manifest: 'Manifest' ) -> ParsedSourceDefinition: if unique_id not", "_sort_values(backward_edges) # Build a map of children of macros def", "macros def build_macro_edges(nodes: List[Any]): forward_edges: Dict[str, List[str]] = { n.unique_id:", "node: ManifestNodes): # nodes can't be overwritten! 
_check_duplicates(node, self.nodes) self.nodes[node.unique_id]", "= set(NodeType.Analysis) def _search_packages( current_project: str, node_package: str, target_package: Optional[str]", "specificity: Specificity ) -> 'MaterializationCandidate': return cls( locality=candidate.locality, macro=candidate.macro, specificity=specificity,", "NameSearcher( name, package, NodeType.refable() ) result = searcher.search(self.disabled) return result", "documentation. This follows the same algorithm as resolve_ref except the", "to avoid the lock! # pickle and deepcopy use this.", "def find_disabled_source_by_name( self, source_name: str, table_name: str, package: Optional[str] =", "# source patches must be unique key = (patch.overrides, patch.name)", "_disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict) _doc_lookup: Optional[DocLookup] = field( default=None,", "None D = TypeVar('D') @dataclass class Disabled(Generic[D]): target: D MaybeDocumentation", "in storage: return None pkg_dct: Mapping[PackageName, UniqueID] = storage[key] if", "return False M = TypeVar('M', bound=MacroCandidate) class CandidateList(List[M]): def last(self)", "Generic, cast, AbstractSet, ClassVar ) from typing_extensions import Protocol from", "def last(self) -> Optional[ParsedMacro]: if not self: return None self.sort()", "-> bool: key = source_file.file_id if key is None: return", "nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict) sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict)", "pre-populate the forward edge dict for simplicity forward_edges: Dict[str, List[str]]", "Lock from typing import ( Dict, List, Optional, Union, Mapping,", "for unique_id, node in other.nodes.items(): current = self.nodes.get(unique_id) if current", "None: assert isinstance(result, ParsedSourceDefinition) return result def _materialization_candidates_for( self, project_name:", "eats newlines, so if you want newlines, # this is", "= {} 
self.storage[doc.name][doc.package_name] = doc.unique_id def populate(self, manifest): for doc", "dbt.contracts.graph.parsed import ( ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure, HasUniqueID,", "we want to only build it once and avoid any", "you want newlines, # this is the result :( msg", "one of the following macros: - {macro.original_file_path} - {other_path} ''',", "SchemaSourceFile, source: UnpatchedSourceDefinition ): # sources can't be overwritten! _check_duplicates(source,", "' 'on cached file information: {}!' .format(key, name, old_file) )", "import UUID from dbt.contracts.graph.compiled import ( CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode", "[] for n in nodes} for node in nodes: backward_edges[node.unique_id]", "lambda x: None, 'deserialize': lambda x: None} ) def __pre_serialize__(self):", "keys are not supported, so ensure it's empty self.source_patches =", "don't call this until you're done with building your manifest!", "str, table_name: str, package: Optional[str] = None ) -> Optional[ParsedSourceDefinition]:", "5)) logger.debug( f'Merged {len(merged)} items from state (sample: {sample})' )", "None) elif patch.yaml_key == 'analyses': unique_id = self.analysis_lookup.get_unique_id(patch.name, None) else:", "= TypeVar('D') @dataclass class Disabled(Generic[D]): target: D MaybeDocumentation = Optional[ParsedDocumentation]", "dest[unique_id] if new_item.original_file_path != existing.original_file_path: raise dbt.exceptions.RuntimeException( f'cannot update a", "self._analysis_lookup # Called by dbt.parser.manifest._resolve_refs_for_exposure # and dbt.parser.manifest._process_refs_for_node def resolve_ref(", "in nodes: backward_edges[node.unique_id] = node.depends_on_nodes[:] for unique_id in node.depends_on_nodes: if", "NodeTypes that implements the 'in' operator. 
\"\"\" if model.resource_type not", "def perform_lookup( self, unique_id: UniqueID, manifest ) -> ManifestNode: if", "should be a container of NodeTypes that implements the 'in'", "haystack: if self._matches(model): return model return None D = TypeVar('D')", "PathSet from dbt.logger import GLOBAL_LOGGER as logger from dbt.node_types import", "or n.depends_on.macros } for node in nodes: for unique_id in", "RunTask.defer_to_manifest def merge_from_artifact( self, adapter, other: 'WritableManifest', selected: AbstractSet[UniqueID], )", "a tuple of arguments # for the object, i.e. the", "name. \"\"\" # avoid an import cycle from dbt.adapters.factory import", "Called by DocsRuntimeContext.doc def resolve_doc( self, name: str, package: Optional[str],", "dbt.logger import GLOBAL_LOGGER as logger from dbt.node_types import NodeType from", "model: N) -> bool: \"\"\"Return True if the model matches", "= _search_packages( current_project, node_package, target_model_package ) for pkg in candidates:", "patches can't be overwritten node = self.nodes.get(unique_id) if node: if", "K_T = TypeVar('K_T') V_T = TypeVar('V_T') def _expect_value( key: K_T,", "from dbt.exceptions import ( CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name,", "on the given list of ParsedNodes and return them as", "Mapping[UniqueID, ParsedExposure] = field( metadata=dict(description=( 'The exposures defined in the", "manifest: 'Manifest' ) -> ParsedSourceDefinition: if unique_id not in manifest.sources:", ") adapter_type: Optional[str] = field( default=None, metadata=dict(description='The type name of", "dbt.ui import line_wrap_message from dbt import flags from dbt import", "iterable by name.\"\"\" for model in haystack: if self._matches(model): return", "is not None and node.config.enabled: return node # it's possible", "manifest.sources.values(): if hasattr(source, 'source_name'): self.add_source(source) 
def perform_lookup( self, unique_id: UniqueID,", "-> ParsedSourceDefinition: if unique_id not in manifest.sources: raise dbt.exceptions.InternalException( f'Source", "not in manifest.nodes: raise dbt.exceptions.InternalException( f'Node {unique_id} found in cache", "compiling the same ephemeral dependency, and we want them to", "= exposure source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self, node: CompileResultNode): if node.unique_id in", "ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for the manifest.\"\"\" dbt_schema_version: str = field( default_factory=lambda:", "selectors.yml' )) ) disabled: Optional[List[CompileResultNode]] = field(metadata=dict( description='A list of", "List[CompileResultNode]] = field(default_factory=dict) _doc_lookup: Optional[DocLookup] = field( default=None, metadata={'serialize': lambda", "for the \"Compilation Error\" indent # note that the line", "name, package, and type. If package is None, any package", "Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): \"\"\"The manifest for the full graph, after", "but not found in manifest' ) return manifest.nodes[unique_id] class AnalysisLookup(RefableLookup):", "an entry in the given iterable by name.\"\"\" for model", "disabled if disabled is None: disabled = self.find_disabled_by_name( target_model_name, pkg", "package: Optional[PackageName], manifest: 'Manifest'): unique_id = self.get_unique_id(key, package) if unique_id", "first - then imported macros - then macros defined in", "ParsingInfo = field( default_factory=ParsingInfo, metadata={'serialize': lambda x: None, 'deserialize': lambda", "{sample})' ) # Methods that were formerly in ParseResult def", "\"{macro.package_name}\". 
To fix this error, rename or remove one of", "x in chain(self.nodes.values(), self.sources.values()) ) # This is used in", "manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) def", "-> bool: \"\"\"Return True if the model matches the given", "bool: if not isinstance(other, MacroCandidate): return NotImplemented return self.locality ==", "to overwrite whatever is at dest[new_item.unique_id] with new_itme. There must", "keyword. If an attribute # is added it must all", "self._find_macros_by_name( name=name, root_project_name=root_project_name, filter=filter, ) return candidates.last() def find_generate_macro_by_name( self,", "str, adapter_type: str ) -> Optional[ParsedMacro]: candidates: CandidateList = CandidateList(chain.from_iterable(", "Searchable(Protocol): resource_type: NodeType package_name: str @property def search_name(self) -> str:", "True if self.locality > other.locality: return False return False @dataclass", "last(self) -> Optional[ParsedMacro]: if not self: return None self.sort() return", "return [current_project, None] else: return [current_project, node_package, None] @dataclass class", "from dbt.contracts.graph.compiled import ( CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode ) from", "and its dependencies' )) ) exposures: Mapping[UniqueID, ParsedExposure] = field(", "set() for unique_id, node in other.nodes.items(): current = self.nodes.get(unique_id) if", "dbt.compilation.Linker.write_graph and # dbt.graph.queue.get and ._include_in_cost def expect(self, unique_id: str)", "def find_unique_id_for_package(storage, key, package: Optional[PackageName]): if key not in storage:", "key = (source.source_name, source.name) if key not in self.storage: self.storage[key]", "the MacroManifest class MacroMethods: # Just to make mypy happy.", "'deserialize': lambda x: None} ) _parsing_info: ParsingInfo = field( default_factory=ParsingInfo,", "in dbt.compilation.Linker.write_graph and 
# dbt.graph.queue.get and ._include_in_cost def expect(self, unique_id:", "else: specificity = Specificity.Adapter full_name = dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False,", "List[str]] = {} # pre-populate the forward edge dict for", "= CandidateList() packages = set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id, macro in self.macros.items():", "= field(default_factory=dict) metadata: ManifestMetadata = field(default_factory=ManifestMetadata) flat_graph: Dict[str, Any] =", "def doc_lookup(self) -> DocLookup: if self._doc_lookup is None: self._doc_lookup =", "self.specificity < other.specificity: return True if self.specificity > other.specificity: return", "The `generate_X_name` macros are similar to regular ones, but ignore", "Provide support for copy.deepcopy() - we just need to avoid", "-> bool: if not isinstance(other, MacroCandidate): return NotImplemented if self.locality", "return NotImplemented equal = ( self.specificity == other.specificity and self.locality", "search_name, package, [NodeType.Source] ) result = searcher.search(self.disabled) if result is", "remove one of the following macros: - {macro.original_file_path} - {other_path}", "( not tracking.active_user.do_not_track ) @classmethod def default(cls): return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version),", "type. If package is None, any package is allowed. 
nodetypes", "\"\"\"The manifest for the full graph, after parsing and during", "dependencies' )) ) docs: Mapping[UniqueID, ParsedDocumentation] = field( metadata=dict(description=( 'The", "K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str ) ->", "materialization_name: str, adapter_type: Optional[str], ) -> CandidateList: if adapter_type is", "= TypeVar('V_T') def _expect_value( key: K_T, src: Mapping[K_T, V_T], old_file:", "If an attribute # is added it must all be", "description='A list of the disabled nodes in the target' ))", "not supported, so ensure it's empty self.source_patches = {} return", "MutableMapping[str, Any] = field(default_factory=dict) disabled: List[CompileResultNode] = field(default_factory=list) files: MutableMapping[str,", ") sources: Mapping[UniqueID, ParsedSourceDefinition] = field( metadata=dict(description=( 'The sources defined", "their dependencies', )) child_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from", "macro.package_name in internal_packages: return Locality.Core else: return Locality.Imported class Searchable(Protocol):", "node.depends_on_nodes[:] for unique_id in node.depends_on_nodes: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id)", "for k, v in self.exposures.items()}, selectors={k: _deepcopy(v) for k, v", "docs=self.docs, exposures=self.exposures, selectors=self.selectors, metadata=self.metadata, disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map, ) def write(self,", "bool: if not isinstance(other, MacroCandidate): return NotImplemented if self.locality <", "not isinstance(other, MacroCandidate): return NotImplemented if self.locality < other.locality: return", "unique_id) node.patch(patch) def add_macro_patch( self, source_file: SchemaSourceFile, patch: ParsedMacroPatch, )", "self.locality == other.locality def __lt__(self, other: object) -> bool: if", "UniqueID, manifest ) -> ParsedDocumentation: if unique_id not in 
manifest.docs:", "]] MaybeNonSource = Optional[Union[ ManifestNode, Disabled[ManifestNode] ]] T = TypeVar('T',", "_search_packages(current_project, node_package) source: Optional[ParsedSourceDefinition] = None disabled: Optional[ParsedSourceDefinition] = None", "False return False @dataclass class MaterializationCandidate(MacroCandidate): specificity: Specificity @classmethod def", "__post_init__(self): if tracking.active_user is None: return if self.user_id is None:", "# filter out imported packages filter=filter, ) return candidates.last() def", "_sort_values(dct): \"\"\"Given a dictionary, sort each value. This makes output", "def find(self, key, package: Optional[PackageName], manifest: 'Manifest'): unique_id = self.get_unique_id(key,", "separate dictionaries, each mapping unique IDs to lists of edges.", "x: None, 'deserialize': lambda x: None} ) _ref_lookup: Optional[RefableLookup] =", "= {} # pre-populate the forward edge dict for simplicity", "it once and avoid any concurrency issues around it. 
Make", "def filter(candidate: MacroCandidate) -> bool: return candidate.locality != Locality.Imported candidates:", "ParsedSourceDefinition] = field( metadata=dict(description=( 'The sources defined in the dbt", "self._analysis_lookup = AnalysisLookup(self) return self._analysis_lookup # Called by dbt.parser.manifest._resolve_refs_for_exposure #", "consistency def __init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]] =", "get_unique_id(self, key, package: Optional[PackageName]): return find_unique_id_for_package(self.storage, key, package) def find(self,", "determine priority: - locally defined macros come first - then", "RefableLookup(self) return self._ref_lookup def rebuild_ref_lookup(self): self._ref_lookup = RefableLookup(self) @property def", "log up to 5 items sample = list(islice(merged, 5)) logger.debug(", "field( metadata=dict(description=( 'The selectors defined in selectors.yml' )) ) disabled:", "n.unique_id: [] for n in nodes if n.unique_id.startswith('macro') or n.depends_on.macros", "{} all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values()) for resource in all_resources:", "for k, v in self.files.items()}, state_check=_deepcopy(self.state_check), ) def build_parent_and_child_maps(self): edge_members", "mypy happy. There must be a better way. def __init__(self):", "at runtime, because multiple threads could be just-in-time compiling the", "if self._source_lookup is None: self._source_lookup = SourceLookup(self) return self._source_lookup def", ") return self.__class__, args class MacroManifest(MacroMethods): def __init__(self, macros): self.macros", "if macro.unique_id in self.macros: # detect that the macro exists", "return node # it's possible that the node is disabled", "self.specificity == other.specificity and self.locality == other.locality ) if equal:", "{new_item.resource_type} to have a new file ' f'path!' 
) dest[unique_id]", "return manifest.sources[unique_id] class RefableLookup(dbtClassMixin): # model, seed, snapshot _lookup_types: ClassVar[set]", "IDs and a writable manifest, update this manifest by replacing", "patch in ' f'file {source_file.path.original_file_path}' ) if unique_id is None:", "not None: return Disabled(disabled) return None # Called by DocsRuntimeContext.doc", "from dbt.node_types import NodeType from dbt.ui import line_wrap_message from dbt", "metadata={ 'description': 'A unique identifier for the user', }, )", "the root project, return it - return the `generate_{component}_name` macro", "str ) -> Optional[ParsedMacro]: candidates: CandidateList = CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name,", "' f'path!' ) dest[unique_id] = new_item # This contains macro", "= set(NodeType.refable()) # refables are actually unique, so the Dict[PackageName,", ") -> Optional[ManifestNode]: searcher: NameSearcher = NameSearcher( name, package, NodeType.refable()", "its name and package name, or None for any package.", "class MaterializationCandidate(MacroCandidate): specificity: Specificity @classmethod def from_macro( cls, candidate: MacroCandidate,", "the name {} (packages {} and ' '{}). dbt cannot", "refables are actually unique, so the Dict[PackageName, UniqueID] will #", "for the project', }, ) user_id: Optional[UUID] = field( default=None,", "because multiple threads could be just-in-time compiling the same ephemeral", "x: None} ) _parsing_info: ParsingInfo = field( default_factory=ParsingInfo, metadata={'serialize': lambda", "k, v in self.nodes.items()}, sources={k: _deepcopy(v) for k, v in", "the arguments must match the order of the attributes #", "# Moved from the ParseResult object source_patches: MutableMapping[SourceKey, SourcePatch] =", "can't be overwritten! 
_check_duplicates(source, self.sources) self.sources[source.unique_id] = source # type:", "list of the disabled nodes in the target' )) parent_map:", "pkg, self) if node is not None and node.config.enabled: return", "of arguments # for the object, i.e. the Manifest. #", "'A unique identifier for the project', }, ) user_id: Optional[UUID]", "dbt.parser.manifest._resolve_sources_for_exposure # and dbt.parser.manifest._process_source_for_node def resolve_source( self, target_source_name: str, target_table_name:", "from dbt.contracts.graph.unparsed import SourcePatch from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash,", "False M = TypeVar('M', bound=MacroCandidate) class CandidateList(List[M]): def last(self) ->", "= (target_source_name, target_table_name) candidates = _search_packages(current_project, node_package) source: Optional[ParsedSourceDefinition] =", "default=None, metadata={ 'description': 'A unique identifier for the project', },", "or node.resource_type in resource_types }) def get_used_databases(self): return frozenset( x.database", "Iterable[N]) -> Optional[N]: \"\"\"Find an entry in the given iterable", "name is used to determine priority: - locally defined macros", "self, source_file: SchemaSourceFile, patch: SourcePatch, ) -> None: # source", "a map of children of macros def build_macro_edges(nodes: List[Any]): forward_edges:", "uuid import UUID from dbt.contracts.graph.compiled import ( CompileResultNode, ManifestNode, NonSourceCompiledNode,", "None disabled: Optional[ParsedSourceDefinition] = None for pkg in candidates: source", "a lock. 
The only time we should want to lock", "self) if source is not None and source.config.enabled: return source", "source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation):", "has happened raise dbt.exceptions.InternalException( 'Expected node {} not found in", "self.locality == other.locality ) if equal: raise_compiler_error( 'Found two materializations", "candidates = _search_packages( current_project, node_package, package ) for pkg in", "bool: return package == candidate.macro.package_name candidates: CandidateList = self._find_macros_by_name( name=name,", "None: self.user_id = tracking.active_user.id if self.send_anonymous_usage_stats is None: self.send_anonymous_usage_stats =", "cached \"result.{}\" based ' 'on cached file information: {}!' .format(key,", "[NodeType.Source] ) result = searcher.search(self.disabled) if result is not None:", "self, name: str, root_project_name: str, package: Optional[str] ) -> Optional[ParsedMacro]:", "helps for tests. \"\"\" return {k: sorted(v) for k, v", "== source_file.checksum def add_source( self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition ):", "ParsedMacro] = field(default_factory=dict) docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict) exposures: MutableMapping[str,", "str, filter: Optional[Callable[[MacroCandidate], bool]] = None ) -> CandidateList: \"\"\"Find", "and self.locality == other.locality ) if equal: raise_compiler_error( 'Found two", "= resource.resource_type.pluralize() if resource_type_plural not in resource_fqns: resource_fqns[resource_type_plural] = set()", "= field( metadata=dict(description=( 'The sources defined in the dbt project", "can't be overwritten! 
_check_duplicates(node, self.nodes) self.nodes[node.unique_id] = node def add_node(self,", "contains macro methods that are in both the Manifest #", "self.docs) self.docs[doc.unique_id] = doc source_file.docs.append(doc.unique_id) # end of methods formerly", "in (adapter_type, None) )) return candidates.last() def get_resource_fqns(self) -> Mapping[str,", "Specificity(enum.IntEnum): Default = 1 Adapter = 2 @dataclass class MacroCandidate:", "return new_node def update_exposure(self, new_exposure: ParsedExposure): _update_into(self.exposures, new_exposure) def update_node(self,", "protocol): args = ( self.nodes, self.sources, self.macros, self.docs, self.exposures, self.selectors,", "have the same original file path. \"\"\" unique_id = new_item.unique_id", "if self.locality < other.locality: return True if self.locality > other.locality:", ") # Methods that were formerly in ParseResult def add_macro(self,", "to only build it once and avoid any concurrency issues", "source_file.sources.append(source.unique_id) def add_node_nofile(self, node: ManifestNodes): # nodes can't be overwritten!", "target_model_package: Optional[str], current_project: str, node_package: str, ) -> MaybeNonSource: node:", "-> ManifestNode: if unique_id not in manifest.nodes: raise dbt.exceptions.InternalException( f'Node", "merged = set() for unique_id, node in other.nodes.items(): current =", "To fix this error, rename or remove one of the", "version of the object and a tuple of arguments #", "Found documentation for macro \"{patch.name}\" ' f'which was not found'", "manifest: 'Manifest'): unique_id = self.get_unique_id(key, package) if unique_id is not", "identifier for the project', }, ) user_id: Optional[UUID] = field(", "ManifestMetadata = field(default_factory=ManifestMetadata) flat_graph: Dict[str, Any] = field(default_factory=dict) state_check: ManifestStateCheck", "): # sources can't be overwritten! 
_check_duplicates(source, self.sources) self.sources[source.unique_id] =", "order of the arguments must match the order of the", ") from dbt.helper_types import PathSet from dbt.logger import GLOBAL_LOGGER as", "nodes} for node in nodes: backward_edges[node.unique_id] = node.depends_on_nodes[:] for unique_id", "the __reduce_ex__ method in the # args tuple in the", "ParsedDocumentation] = field(default_factory=dict) exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict) selectors: MutableMapping[str,", "def add_macro(self, source_file: SourceFile, macro: ParsedMacro): if macro.unique_id in self.macros:", "import ( Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set,", "its dependencies' )) ) sources: Mapping[UniqueID, ParsedSourceDefinition] = field( metadata=dict(description=(", "metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} ) def", "key, package: Optional[PackageName]): return find_unique_id_for_package(self.storage, key, package) def find(self, key,", "of the adapter'), ) def __post_init__(self): if tracking.active_user is None:", "frozenset({ (node.database, node.schema) for node in chain(self.nodes.values(), self.sources.values()) if not", "project and its dependencies' )) ) sources: Mapping[UniqueID, ParsedSourceDefinition] =", "SchemaSourceFile, patch: ParsedNodePatch, ) -> None: if patch.yaml_key in ['models',", "existing value to overwrite, and they two nodes must have", "flags.MP_CONTEXT.Lock() return obj def sync_update_node( self, new_node: NonSourceCompiledNode ) ->", "Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from parent nodes to their", "unique_id: UniqueID, manifest ) -> ParsedDocumentation: if unique_id not in", "= patch source_file.source_patches.append(key) def get_used_schemas(self, resource_types=None): return frozenset({ (node.database, node.schema)", "ClassVar[set] = set(NodeType.Analysis) def _search_packages( current_project: str, node_package: str, 
target_package:", "node and return that. If the existing node is compiled,", "Mapping[K_T, V_T], old_file: SourceFile, name: str ) -> V_T: if", "package is None, any package is allowed. nodetypes should be", "Optional[ParsedMacro]: \"\"\"Find a macro in the graph by its name", "x: None, 'deserialize': lambda x: None} ) _analysis_lookup: Optional[AnalysisLookup] =", "not None: return result return None # Called by RunTask.defer_to_manifest", "ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure, HasUniqueID, UnpatchedSourceDefinition, ManifestNodes ) from dbt.contracts.graph.unparsed", "materializations with the name {} (packages {} and ' '{}).", "node.resource_type in self._lookup_types: if node.name not in self.storage: self.storage[node.name] =", "in dct.items()} def build_node_edges(nodes: List[ManifestNode]): \"\"\"Build the forward and backward", "field(default_factory=dict) disabled: List[CompileResultNode] = field(default_factory=list) files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)", "existing) _update_into(self.nodes, new_node) return new_node def update_exposure(self, new_exposure: ParsedExposure): _update_into(self.exposures,", "dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile from dbt.contracts.util import (", "build it once and avoid any concurrency issues around it.", ") # This is used in dbt.task.rpc.sql_commands 'add_new_refs' def deepcopy(self):", "manifest, update this manifest by replacing any unselected nodes with", "context property # in the ProviderContext class. 
self.flat_graph = {}", "field(metadata=dict( description='A mapping from child nodes to their dependencies', ))", "doc.name not in self.storage: self.storage[doc.name] = {} self.storage[doc.name][doc.package_name] = doc.unique_id", "storage: return None pkg_dct: Mapping[PackageName, UniqueID] = storage[key] if package", "exposure source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self, node: CompileResultNode): if node.unique_id in self._disabled:", "other.nodes.items(): current = self.nodes.get(unique_id) if current and ( node.resource_type in", "def __init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}", "we just need to avoid the lock! # pickle and", "name.\"\"\" for model in haystack: if self._matches(model): return model return", "of NodeTypes that implements the 'in' operator. \"\"\" if model.resource_type", "in manifest'.format(unique_id) ) @property def doc_lookup(self) -> DocLookup: if self._doc_lookup", "> other.locality: return False return False @dataclass class MaterializationCandidate(MacroCandidate): specificity:", "building your manifest! \"\"\" self.flat_graph = { 'exposures': { k:", "__init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest)", "return manifest.docs[unique_id] class SourceLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[Tuple[str,", "two macros named \"{macro.name}\" in the project \"{macro.package_name}\". 
To fix", "List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable,", "if key not in self.files: return False my_checksum = self.files[key].checksum", "logger from dbt.node_types import NodeType from dbt.ui import line_wrap_message from", "-> bool: if not isinstance(other, MaterializationCandidate): return NotImplemented equal =", "in cache but not found in manifest' ) return manifest.docs[unique_id]", "\"\"\"Find an entry in the given iterable by name.\"\"\" for", "package, [NodeType.Source] ) result = searcher.search(self.disabled) if result is not", "object) -> bool: if not isinstance(other, MacroCandidate): return NotImplemented if", "None else: return next(iter(pkg_dct.values())) elif package in pkg_dct: return pkg_dct[package]", "of the manifest. If the existing node is not compiled,", "# already compiled -> must be a NonSourceCompiledNode return cast(NonSourceCompiledNode,", "v in self.sources.items() } } def find_disabled_by_name( self, name: str,", "ManifestNode): if node.resource_type in self._lookup_types: if node.name not in self.storage:", "return False my_checksum = self.files[key].checksum return my_checksum == source_file.checksum def", "from dbt.logger import GLOBAL_LOGGER as logger from dbt.node_types import NodeType", "_check_duplicates(doc, self.docs) self.docs[doc.unique_id] = doc source_file.docs.append(doc.unique_id) # end of methods", "source patches must be unique key = (patch.overrides, patch.name) if", "only time we should want to lock is when compiling", "namespaced unique_id = f'macro.{patch.package_name}.{patch.name}' macro = self.macros.get(unique_id) if not macro:", "self) if node is not None and node.config.enabled: return node", "FileHash, AnySourceFile from dbt.contracts.util import ( BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version", "# avoid an import cycle from dbt.adapters.factory import get_adapter_package_names candidates:", "unique, so the Dict[PackageName, UniqueID] will # only ever 
have", "_analysis_lookup: Optional[AnalysisLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize':", "in chain(self.nodes.values(), self.sources.values()) if not resource_types or node.resource_type in resource_types", "the Manifest. # The order of the arguments must match", "Called by dbt.parser.manifest._resolve_sources_for_exposure # and dbt.parser.manifest._process_source_for_node def resolve_source( self, target_source_name:", "return None def add_node(self, node: ManifestNode): if node.resource_type in self._lookup_types:", "get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {}", "update_node(self, new_node: ManifestNode): _update_into(self.nodes, new_node) def update_source(self, new_source: ParsedSourceDefinition): _update_into(self.sources,", "-> DocLookup: if self._doc_lookup is None: self._doc_lookup = DocLookup(self) return", "return Disabled(disabled) return None # Called by dbt.parser.manifest._resolve_sources_for_exposure # and", "is None: self._source_lookup = SourceLookup(self) return self._source_lookup def rebuild_source_lookup(self): self._source_lookup", "= [node] def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None): self.add_disabled_nofile(node)", "source: Optional[ParsedSourceDefinition] = None disabled: Optional[ParsedSourceDefinition] = None for pkg", "self._doc_lookup is None: self._doc_lookup = DocLookup(self) return self._doc_lookup def rebuild_doc_lookup(self):", "for n in nodes if n.unique_id.startswith('macro') or n.depends_on.macros } for", "None: specificity = Specificity.Default else: specificity = Specificity.Adapter full_name =", "!= Locality.Imported candidates: CandidateList = self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name, # filter", "packages = set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id, macro in self.macros.items(): if 
macro.name", "= ( not tracking.active_user.do_not_track ) @classmethod def default(cls): return cls(", "self[-1].macro def _get_locality( macro: ParsedMacro, root_project_name: str, internal_packages: Set[str] )", "( CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode ) from dbt.contracts.graph.parsed import (", "nodes=self.nodes, sources=self.sources, macros=self.macros, docs=self.docs, exposures=self.exposures, selectors=self.selectors, metadata=self.metadata, disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map,", "in haystack: if self._matches(model): return model return None D =", "PackageName = str DocName = str RefName = str UniqueID", "terrible has happened raise dbt.exceptions.InternalException( 'Expected node {} not found", "( Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple,", "ManifestNode: if unique_id not in manifest.nodes: raise dbt.exceptions.InternalException( f'Node {unique_id}", "obj._lock = flags.MP_CONTEXT.Lock() return obj def sync_update_node( self, new_node: NonSourceCompiledNode", "_materialization_candidates_for( self, project_name: str, materialization_name: str, adapter_type: Optional[str], ) ->", "MutableMapping[str, AnySourceFile] = field(default_factory=dict) metadata: ManifestMetadata = field(default_factory=ManifestMetadata) flat_graph: Dict[str,", "Mapping[UniqueID, Any] = field( metadata=dict(description=( 'The selectors defined in selectors.yml'", "[current_project, node_package, None] @dataclass class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for the manifest.\"\"\"", "existing node is compiled, do not update the manifest and", "msg = line_wrap_message( f'''\\ dbt found two macros named \"{macro.name}\"", "usage statistics' )), ) adapter_type: Optional[str] = field( default=None, metadata=dict(description='The", "locality=_get_locality(macro, root_project_name, packages), macro=macro, ) if filter is None or", "is called by 'parse_patch' in the 
NodePatchParser def add_patch( self,", "sure you don't call this until you're done with building", "TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar ) from typing_extensions", ") -> Optional[ParsedMacro]: candidates: CandidateList = CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name,", "DocsRuntimeContext.doc def resolve_doc( self, name: str, package: Optional[str], current_project: str,", "= field(default_factory=ManifestMetadata) flat_graph: Dict[str, Any] = field(default_factory=dict) state_check: ManifestStateCheck =", "object used to # create the initial version of the", "not None and source.config.enabled: return source if disabled is None:", "a writable manifest, update this manifest by replacing any unselected", "in self._find_macros_by_name(full_name, project_name) ) def find_materialization_macro_by_name( self, project_name: str, materialization_name:", "PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {} all_resources = chain(self.exposures.values(),", "None: return self.perform_lookup(unique_id, manifest) return None def add_doc(self, doc: ParsedDocumentation):", "manifest! 
\"\"\" self.flat_graph = { 'exposures': { k: v.to_dict(omit_none=False) for", "-> SourceLookup: if self._source_lookup is None: self._source_lookup = SourceLookup(self) return", "Optional[str], current_project: str, node_package: str, ) -> Optional[ParsedDocumentation]: \"\"\"Resolve the", "a `generate_{component}_name` macro in the root project, return it -", "newlines, # this is the result :( msg = line_wrap_message(", "doc.unique_id def populate(self, manifest): for doc in manifest.docs.values(): self.add_doc(doc) def", "not in manifest.docs: raise dbt.exceptions.InternalException( f'Doc {unique_id} found in cache", "__reduce_ex__ method in the # args tuple in the right", "forward_edges def writable_manifest(self): self.build_parent_and_child_maps() return WritableManifest( nodes=self.nodes, sources=self.sources, macros=self.macros, docs=self.docs,", "not in self.storage: self.storage[doc.name] = {} self.storage[doc.name][doc.package_name] = doc.unique_id def", "str, materialization_name: str, adapter_type: str ) -> Optional[ParsedMacro]: candidates: CandidateList", "an update_{new_item.resource_type} call with an ' f'unrecognized {new_item.resource_type}: {new_item.unique_id}' )", "Optional[N]: \"\"\"Find an entry in the given iterable by name.\"\"\"", "other.locality: return False return False @dataclass class MaterializationCandidate(MacroCandidate): specificity: Specificity", "ManifestNode, NonSourceCompiledNode, GraphMemberNode ) from dbt.contracts.graph.parsed import ( ParsedMacro, ParsedDocumentation,", "or self.package == model.package_name def search(self, haystack: Iterable[N]) -> Optional[N]:", "in self.macros: # detect that the macro exists and emit", "- locally defined macros come first - then imported macros", "self.nodes.get(unique_id) if node: if node.patch_path: package_name, existing_file_path = node.patch_path.split('://') raise_duplicate_patch_name(patch,", "= self.get_unique_id(key, package) if unique_id is not None: return 
self.perform_lookup(unique_id,", "DocName = str RefName = str UniqueID = str def", "node in nodes: backward_edges[node.unique_id] = node.depends_on_nodes[:] for unique_id in node.depends_on_nodes:", "metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} ) _parsing_info:", "unique_id not in manifest.nodes: raise dbt.exceptions.InternalException( f'Node {unique_id} found in", "self.metadata, self.flat_graph, self.state_check, self.source_patches, self._disabled, self._doc_lookup, self._source_lookup, self._ref_lookup, ) return", "target_source_name, target_table_name, pkg ) if disabled is not None: return", "candidate = MacroCandidate( locality=_get_locality(macro, root_project_name, packages), macro=macro, ) if filter", ") @property def doc_lookup(self) -> DocLookup: if self._doc_lookup is None:", "import cycle from dbt.adapters.factory import get_adapter_package_names candidates: CandidateList = CandidateList()", "raise_duplicate_resource_name(value, src[value.unique_id]) K_T = TypeVar('K_T') V_T = TypeVar('V_T') def _expect_value(", "property # in the ProviderContext class. 
self.flat_graph = {} AnyManifest", "with_prefix=False, ) return CandidateList( MaterializationCandidate.from_macro(m, specificity) for m in self._find_macros_by_name(full_name,", "wrap eats newlines, so if you want newlines, # this", "rebuild_doc_lookup(self): self._doc_lookup = DocLookup(self) @property def source_lookup(self) -> SourceLookup: if", "return self._ref_lookup def rebuild_ref_lookup(self): self._ref_lookup = RefableLookup(self) @property def analysis_lookup(self)", "return False if self.name != model.search_name: return False return self.package", "@property def search_name(self) -> str: raise NotImplementedError('search_name not implemented') N", "from dbt.helper_types import PathSet from dbt.logger import GLOBAL_LOGGER as logger", "self.nodes[unique_id] elif unique_id in self.sources: return self.sources[unique_id] elif unique_id in", "output deterministic, which helps for tests. \"\"\" return {k: sorted(v)", "= field(metadata=dict( description='Metadata about the manifest', )) def _check_duplicates( value:", "Build a map of children of macros def build_macro_edges(nodes: List[Any]):", "manifest.sources[unique_id] class RefableLookup(dbtClassMixin): # model, seed, snapshot _lookup_types: ClassVar[set] =", "value. This makes output deterministic, which helps for tests. \"\"\"", "model return None D = TypeVar('D') @dataclass class Disabled(Generic[D]): target:", "= field(default_factory=dict) selectors: MutableMapping[str, Any] = field(default_factory=dict) disabled: List[CompileResultNode] =", "self.sources) self.sources[source.unique_id] = source # type: ignore source_file.sources.append(source.unique_id) def add_node_nofile(self,", "MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet,", "full graph, after parsing and during compilation. 
\"\"\" # These", "user_id: Optional[UUID] = field( default=None, metadata={ 'description': 'A unique identifier", "self.add_source(source) def perform_lookup( self, unique_id: UniqueID, manifest: 'Manifest' ) ->", "are unnecessary as docs are always enabled. \"\"\" candidates =", "None) )) return candidates.last() def get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns:", "it with the new node and return that. If the", "ParsedExposure] = field( metadata=dict(description=( 'The exposures defined in the dbt", "given name, package, and type. If package is None, any", "filter out imported packages filter=filter, ) return candidates.last() def _find_macros_by_name(", "name, or None for any package. The root project name", "and the MacroManifest class MacroMethods: # Just to make mypy", "DataClassMessagePackMixin, dbtClassMixin): \"\"\"The manifest for the full graph, after parsing", "return equal def __lt__(self, other: object) -> bool: if not", "if self.specificity > other.specificity: return False if self.locality < other.locality:", ") -> Optional[ParsedSourceDefinition]: search_name = f'{source_name}.{table_name}' searcher: NameSearcher = NameSearcher(", "source # type: ignore source_file.sources.append(source.unique_id) def add_node_nofile(self, node: ManifestNodes): #", "any package. 
The root project name is used to determine", "ParsedExposure): _check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self, node:", "must be unique key = (patch.overrides, patch.name) if key in", "It returns a callable object used to # create the", ") return manifest.docs[unique_id] class SourceLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage:", "# end of methods formerly in ParseResult # Provide support", "is not None: return result return None # Called by", "== root_project_name: return Locality.Root elif macro.package_name in internal_packages: return Locality.Core", "self.flat_graph = { 'exposures': { k: v.to_dict(omit_none=False) for k, v", "is used in dbt.task.rpc.sql_commands 'add_new_refs' def deepcopy(self): return Manifest( nodes={k:", "must be an existing value to overwrite, and they two", "List[str]] = { n.unique_id: [] for n in nodes if", "manifest' ) return manifest.sources[unique_id] class RefableLookup(dbtClassMixin): # model, seed, snapshot", "project_name=project_name, materialization_name=materialization_name, adapter_type=atype, ) for atype in (adapter_type, None) ))", "node.patch(patch) def add_macro_patch( self, source_file: SchemaSourceFile, patch: ParsedMacroPatch, ) ->", "return NotImplemented if self.specificity < other.specificity: return True if self.specificity", "forward and backward edges on the given list of ParsedNodes", "return package == candidate.macro.package_name candidates: CandidateList = self._find_macros_by_name( name=name, root_project_name=root_project_name,", "deterministic, which helps for tests. \"\"\" return {k: sorted(v) for", "add_doc(self, doc: ParsedDocumentation): if doc.name not in self.storage: self.storage[doc.name] =", "filter=filter, ) return candidates.last() def _find_macros_by_name( self, name: str, root_project_name:", "right position. 
nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict) sources: MutableMapping[str, ParsedSourceDefinition]", "None self.sort() return self[-1].macro def _get_locality( macro: ParsedMacro, root_project_name: str,", "Moved from the ParseResult object source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict)", "similar to regular ones, but ignore imported packages. - if", "return frozenset({ (node.database, node.schema) for node in chain(self.nodes.values(), self.sources.values()) if", "used to determine priority: - locally defined macros come first", "mapping unique IDs to lists of edges. \"\"\" backward_edges: Dict[str,", "False if key not in self.files: return False my_checksum =", "the attributes # in the Manifest class declaration, because they", "edge_members = list(chain( self.nodes.values(), self.sources.values(), self.exposures.values(), )) forward_edges, backward_edges =", "self.exposures.values(), )) forward_edges, backward_edges = build_node_edges(edge_members) self.child_map = forward_edges self.parent_map", "the object, i.e. the Manifest. 
# The order of the", "self.sources, self.macros, self.docs, self.exposures, self.selectors, self.disabled, self.files, self.metadata, self.flat_graph, self.state_check,", "src[value.unique_id]) K_T = TypeVar('K_T') V_T = TypeVar('V_T') def _expect_value( key:", "dbt.dataclass_schema import dbtClassMixin from dbt.exceptions import ( CompilationException, raise_duplicate_resource_name, raise_compiler_error,", "ManifestStateCheck(dbtClassMixin): vars_hash: FileHash = field(default_factory=FileHash.empty) profile_hash: FileHash = field(default_factory=FileHash.empty) project_hashes:", "test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_exposure(self, source_file: SchemaSourceFile, exposure:", "MaterializationCandidate.from_macro(m, specificity) for m in self._find_macros_by_name(full_name, project_name) ) def find_materialization_macro_by_name(", "def _find_macros_by_name( self, name: str, root_project_name: str, filter: Optional[Callable[[MacroCandidate], bool]]", "with building your manifest! 
\"\"\" self.flat_graph = { 'exposures': {", "node: ManifestNodes, test_from=None): self.add_node_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id,", "= macro source_file.macros.append(macro.unique_id) def has_file(self, source_file: SourceFile) -> bool: key", "in dbt.task.rpc.sql_commands 'add_new_refs' def deepcopy(self): return Manifest( nodes={k: _deepcopy(v) for", "_source_lookup: Optional[SourceLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize':", "MaybeParsedSource: key = (target_source_name, target_table_name) candidates = _search_packages(current_project, node_package) source:", "AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] = set(NodeType.Analysis) def _search_packages( current_project: str, node_package:", "Optional[NodeEdgeMap] = field(metadata=dict( description='A mapping from child nodes to their", "up to 5 items sample = list(islice(merged, 5)) logger.debug( f'Merged", "populate(self, manifest): for source in manifest.sources.values(): if hasattr(source, 'source_name'): self.add_source(source)", "the lock! # pickle and deepcopy use this. 
It returns", "and they two nodes must have the same original file", "in the root project \"\"\" filter: Optional[Callable[[MacroCandidate], bool]] = None", "'Manifest'): self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {} self.populate(manifest) def", "@dataclass class MaterializationCandidate(MacroCandidate): specificity: Specificity @classmethod def from_macro( cls, candidate:", "self.locality < other.locality: return True if self.locality > other.locality: return", "'compiled', False): # already compiled -> must be a NonSourceCompiledNode", "and return them as two separate dictionaries, each mapping unique", "key is None: return False if key not in self.files:", "x: None} ) _lock: Lock = field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda", "attributes are both positional and by keyword. If an attribute", "threads could be just-in-time compiling the same ephemeral dependency, and", "in nodes} for node in nodes: backward_edges[node.unique_id] = node.depends_on_nodes[:] for", ") result = searcher.search(self.disabled) if result is not None: assert", "Error\" indent # note that the line wrap eats newlines,", "return Disabled(disabled) return None # Called by DocsRuntimeContext.doc def resolve_doc(", "could be just-in-time compiling the same ephemeral dependency, and we", "add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure): _check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id] = exposure", "return self.perform_lookup(unique_id, manifest) return None def add_source(self, source: ParsedSourceDefinition): key", "other: 'WritableManifest', selected: AbstractSet[UniqueID], ) -> None: \"\"\"Given the selected", "the macro exists and emit an error other_path = self.macros[macro.unique_id].original_file_path", "macros defined in the dbt project and its dependencies' ))", "else: return None class DocLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage:", 
"ephemeral ancestor of a node at runtime, because multiple threads", "= self.nodes[new_node.unique_id] if getattr(existing, 'compiled', False): # already compiled ->", ") for atype in (adapter_type, None) )) return candidates.last() def", "Disabled(disabled) return None # Called by DocsRuntimeContext.doc def resolve_doc( self,", "enabled. \"\"\" candidates = _search_packages( current_project, node_package, package ) for", "project \"\"\" filter: Optional[Callable[[MacroCandidate], bool]] = None if package is", "self.docs[doc.unique_id] = doc source_file.docs.append(doc.unique_id) # end of methods formerly in", "files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) metadata: ManifestMetadata = field(default_factory=ManifestMetadata) flat_graph:", "disabled: List[CompileResultNode] = field(default_factory=list) files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) metadata:", "update_exposure(self, new_exposure: ParsedExposure): _update_into(self.exposures, new_exposure) def update_node(self, new_node: ManifestNode): _update_into(self.nodes,", "if result is not None: assert isinstance(result, ParsedSourceDefinition) return result", "to have a consistent view of the manifest. If the", "str], Dict[PackageName, UniqueID]] = {} self.populate(manifest) def get_unique_id(self, key, package:", "each mapping unique IDs to lists of edges. \"\"\" backward_edges:", "_update_into(dest: MutableMapping[str, T], new_item: T): \"\"\"Update dest to overwrite whatever", "the result :( msg = line_wrap_message( f'''\\ dbt found two", "f'Node {unique_id} found in cache but not found in manifest'", "then imported macros - then macros defined in the root", "static_analysis_parsed_path_count: int = 0 static_analysis_path_count: int = 0 @dataclass class", "unnecessary as docs are always enabled. 
\"\"\" candidates = _search_packages(", "def _check_duplicates( value: HasUniqueID, src: Mapping[str, HasUniqueID] ): if value.unique_id", "resource in all_resources: resource_type_plural = resource.resource_type.pluralize() if resource_type_plural not in", "find_macro_by_name( self, name: str, root_project_name: str, package: Optional[str] ) ->", "import dbt.utils NodeEdgeMap = Dict[str, List[str]] PackageName = str DocName", "None: self._source_lookup = SourceLookup(self) return self._source_lookup def rebuild_source_lookup(self): self._source_lookup =", "f'file {source_file.path.original_file_path}' ) if unique_id is None: # This will", "new node and return that. If the existing node is", "enum from dataclasses import dataclass, field from itertools import chain,", "in resource_types }) def get_used_databases(self): return frozenset( x.database for x", "\"result.{}\" based ' 'on cached file information: {}!' .format(key, name,", "add_source( self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition ): # sources can't", "the initial version of the object and a tuple of", "self.doc_lookup.find(name, pkg, self) if result is not None: return result", "Locality.Root elif macro.package_name in internal_packages: return Locality.Core else: return Locality.Imported", "!= model.search_name: return False return self.package is None or self.package", "to have a new file ' f'path!' 
) dest[unique_id] =", "self.macros.get(unique_id) if not macro: warn_or_error( f'WARNING: Found documentation for macro", "dependencies' )) ) sources: Mapping[UniqueID, ParsedSourceDefinition] = field( metadata=dict(description=( 'The", "'parse_patch' in the NodePatchParser def add_patch( self, source_file: SchemaSourceFile, patch:", "__eq__(self, other: object) -> bool: if not isinstance(other, MacroCandidate): return", "\"Compilation Error\" indent # note that the line wrap eats", "for k, v in self.docs.items()}, exposures={k: _deepcopy(v) for k, v", "node in chain(self.nodes.values(), self.sources.values()) if not resource_types or node.resource_type in", "field( default=None, metadata={ 'description': 'A unique identifier for the project',", "elif patch.yaml_key == 'analyses': unique_id = self.analysis_lookup.get_unique_id(patch.name, None) else: raise", "for pkg in candidates: source = self.source_lookup.find(key, pkg, self) if", "k, v in self.sources.items()}, macros={k: _deepcopy(v) for k, v in", "-> GraphMemberNode: if unique_id in self.nodes: return self.nodes[unique_id] elif unique_id", "List[ManifestNode]): \"\"\"Build the forward and backward edges on the given", "self.macros.items(): if macro.name != name: continue candidate = MacroCandidate( locality=_get_locality(macro,", "class Locality(enum.IntEnum): Core = 1 Imported = 2 Root =", "self.exposures.items() }, 'nodes': { k: v.to_dict(omit_none=False) for k, v in", "self.send_anonymous_usage_stats = ( not tracking.active_user.do_not_track ) @classmethod def default(cls): return", "n in nodes if n.unique_id.startswith('macro') or n.depends_on.macros } for node", "'description': 'A unique identifier for the project', }, ) user_id:", "SourceFile, SchemaSourceFile, FileHash, AnySourceFile from dbt.contracts.util import ( BaseArtifactMetadata, SourceKey,", "selectors: MutableMapping[str, Any] = field(default_factory=dict) disabled: List[CompileResultNode] = field(default_factory=list) files:", "are not 
supported, so ensure it's empty self.source_patches = {}", "from typing_extensions import Protocol from uuid import UUID from dbt.contracts.graph.compiled", "return # patches can't be overwritten node = self.nodes.get(unique_id) if", "= self.source_lookup.find(key, pkg, self) if source is not None and", "is None: return False if key not in self.files: return", "dest to overwrite whatever is at dest[new_item.unique_id] with new_itme. There", "found in manifest'.format(unique_id) ) @property def doc_lookup(self) -> DocLookup: if", "= CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name, adapter_type=atype, ) for atype in", "return manifest.nodes[unique_id] class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] = set(NodeType.Analysis) def _search_packages(", "@classmethod def from_macro( cls, candidate: MacroCandidate, specificity: Specificity ) ->", "= MacroCandidate( locality=_get_locality(macro, root_project_name, packages), macro=macro, ) if filter is", "child_map=self.child_map, parent_map=self.parent_map, ) def write(self, path): self.writable_manifest().write(path) # Called in", "return that. 
If the existing node is compiled, do not", "Default = 1 Adapter = 2 @dataclass class MacroCandidate: locality:", "target_source_name: str, target_table_name: str, current_project: str, node_package: str ) ->", "not in self.files: return False my_checksum = self.files[key].checksum return my_checksum", ") for pkg in candidates: node = self.ref_lookup.find(target_model_name, pkg, self)", "project and its dependencies' )) ) selectors: Mapping[UniqueID, Any] =", "manifest) return None def add_doc(self, doc: ParsedDocumentation): if doc.name not", "= field(default_factory=dict) # following is from ParseResult _disabled: MutableMapping[str, List[CompileResultNode]]", "default(cls): return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def _sort_values(dct): \"\"\"Given a dictionary,", "self.find_disabled_source_by_name( target_source_name, target_table_name, pkg ) if disabled is not None:", "in the dbt project and its dependencies' )) ) macros:", "}, ) send_anonymous_usage_stats: Optional[bool] = field( default=None, metadata=dict(description=( 'Whether dbt", "line wrap eats newlines, so if you want newlines, #", "@dataclass class MacroCandidate: locality: Locality macro: ParsedMacro def __eq__(self, other:", "patch source_file.source_patches.append(key) def get_used_schemas(self, resource_types=None): return frozenset({ (node.database, node.schema) for", "from itertools import chain, islice from mashumaro import DataClassMessagePackMixin from", "MaybeNonSource = Optional[Union[ ManifestNode, Disabled[ManifestNode] ]] T = TypeVar('T', bound=GraphMemberNode)", "self.sources.items() } } def find_disabled_by_name( self, name: str, package: Optional[str]", "NonSourceCompiledNode, GraphMemberNode ) from dbt.contracts.graph.parsed import ( ParsedMacro, ParsedDocumentation, ParsedNodePatch,", "the order of the attributes # in the Manifest class", "NodeType.refable() ) result = searcher.search(self.disabled) return result def find_disabled_source_by_name( 
self,", "in candidates: source = self.source_lookup.find(key, pkg, self) if source is", "support for copy.deepcopy() - we just need to avoid the", "self._matches(model): return model return None D = TypeVar('D') @dataclass class", "def add_node(self, node: ManifestNode): if node.resource_type in self._lookup_types: if node.name", "from uuid import UUID from dbt.contracts.graph.compiled import ( CompileResultNode, ManifestNode,", "# note that the line wrap eats newlines, so if", "to construct a Manifest. def __reduce_ex__(self, protocol): args = (", "with an ' f'unrecognized {new_item.resource_type}: {new_item.unique_id}' ) existing = dest[unique_id]", "-> bool: return candidate.locality != Locality.Imported candidates: CandidateList = self._find_macros_by_name(", "found' ) return if macro.patch_path: package_name, existing_file_path = macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch,", "model, seed, snapshot _lookup_types: ClassVar[set] = set(NodeType.refable()) # refables are", "pkg_dct: return pkg_dct[package] else: return None class DocLookup(dbtClassMixin): def __init__(self,", "elif package in pkg_dct: return pkg_dct[package] else: return None class", "if the model matches the given name, package, and type.", "set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id, macro in self.macros.items(): if macro.name != name:", "dbt_schema_version: str = field( default_factory=lambda: str(WritableManifest.dbt_schema_version) ) project_id: Optional[str] =", "of the object and a tuple of arguments # for", "in manifest.nodes.values(): self.add_node(node) def perform_lookup( self, unique_id: UniqueID, manifest )", "self.add_doc(doc) def perform_lookup( self, unique_id: UniqueID, manifest ) -> ParsedDocumentation:", "field(default_factory=dict) @dataclass class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): \"\"\"The manifest for the", ") exposures: Mapping[UniqueID, ParsedExposure] = field( 
metadata=dict(description=( 'The exposures defined", "self.macros[macro.unique_id] = macro source_file.macros.append(macro.unique_id) def has_file(self, source_file: SourceFile) -> bool:", "@dataclass class NameSearcher(Generic[N]): name: str package: Optional[str] nodetypes: List[NodeType] def", "not resource_types or node.resource_type in resource_types }) def get_used_databases(self): return", "`generate_{component}_name` macro from the 'dbt' internal project \"\"\" def filter(candidate:", "source.config.enabled: return source if disabled is None: disabled = self.find_disabled_source_by_name(", "in self.selectors.items()}, metadata=self.metadata, disabled=[_deepcopy(n) for n in self.disabled], files={k: _deepcopy(v)", "# Methods that were formerly in ParseResult def add_macro(self, source_file:", "returned by the 'graph' context property # in the ProviderContext", "TypeVar('D') @dataclass class Disabled(Generic[D]): target: D MaybeDocumentation = Optional[ParsedDocumentation] MaybeParsedSource", "both positional and by keyword. 
If an attribute # is", "declaration, because they are used as # positional arguments to", "= field( metadata=dict(description=( 'The nodes defined in the dbt project", "def find_disabled_by_name( self, name: str, package: Optional[str] = None )", "self, adapter, other: 'WritableManifest', selected: AbstractSet[UniqueID], ) -> None: \"\"\"Given", "used to # create the initial version of the object", "key not in src: raise CompilationException( 'Expected to find \"{}\"", "= {n.unique_id: [] for n in nodes} for node in", "'The sources defined in the dbt project and its dependencies'", "self.locality > other.locality: return False return False @dataclass class MaterializationCandidate(MacroCandidate):", ") @classmethod def default(cls): return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def _sort_values(dct):", "forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges), _sort_values(backward_edges) # Build a map of", "List[Optional[str]]: if target_package is not None: return [target_package] elif current_project", "FileHash = field(default_factory=FileHash.empty) project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict) @dataclass class", "field(default_factory=ManifestStateCheck) # Moved from the ParseResult object source_patches: MutableMapping[SourceKey, SourcePatch]", "False if self.name != model.search_name: return False return self.package is", "SourcePatch] = field(default_factory=dict) # following is from ParseResult _disabled: MutableMapping[str,", "non-ephemeral refable nodes are examined. 
\"\"\" refables = set(NodeType.refable()) merged", "v in self.nodes.items() }, 'sources': { k: v.to_dict(omit_none=False) for k,", "src: Mapping[K_T, V_T], old_file: SourceFile, name: str ) -> V_T:", "if target_package is not None: return [target_package] elif current_project ==", "in other.nodes.items(): current = self.nodes.get(unique_id) if current and ( node.resource_type", "SchemaSourceFile, patch: ParsedMacroPatch, ) -> None: # macros are fully", "_deepcopy(v) for k, v in self.files.items()}, state_check=_deepcopy(self.state_check), ) def build_parent_and_child_maps(self):", "None: return self.perform_lookup(unique_id, manifest) return None def add_source(self, source: ParsedSourceDefinition):", "compiled, update it with the new node and return that.", "send_anonymous_usage_stats: Optional[bool] = field( default=None, metadata=dict(description=( 'Whether dbt is configured", "error other_path = self.macros[macro.unique_id].original_file_path # subtract 2 for the \"Compilation", "if n.unique_id.startswith('macro') or n.depends_on.macros } for node in nodes: for", "so if you want newlines, # this is the result", "about the manifest', )) def _check_duplicates( value: HasUniqueID, src: Mapping[str,", "found two macros named \"{macro.name}\" in the project \"{macro.package_name}\". To", "ManifestMetadata = field(metadata=dict( description='Metadata about the manifest', )) def _check_duplicates(", "SourceFile, macro: ParsedMacro): if macro.unique_id in self.macros: # detect that", "if unique_id is None: # This will usually happen when", "= list(islice(merged, 5)) logger.debug( f'Merged {len(merged)} items from state (sample:", "if not pkg_dct: return None else: return next(iter(pkg_dct.values())) elif package", "# Called by dbt.parser.manifest._resolve_sources_for_exposure # and dbt.parser.manifest._process_source_for_node def resolve_source( self,", "'graph' context property # in the ProviderContext class. 
self.flat_graph =", "simplicity forward_edges: Dict[str, List[str]] = {n.unique_id: [] for n in", "def populate(self, manifest): for doc in manifest.docs.values(): self.add_doc(doc) def perform_lookup(", "def __init__(self): self.macros = [] self.metadata = {} def find_macro_by_name(", "mapping from child nodes to their dependencies', )) child_map: Optional[NodeEdgeMap]", "unique_id, node in other.nodes.items(): current = self.nodes.get(unique_id) if current and", "macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name] = unique_id macro.patch(patch) def add_source_patch( self,", "self.flat_graph, self.state_check, self.source_patches, self._disabled, self._doc_lookup, self._source_lookup, self._ref_lookup, ) return self.__class__,", "done with building your manifest! \"\"\" self.flat_graph = { 'exposures':", "None class DocLookup(dbtClassMixin): def __init__(self, manifest: 'Manifest'): self.storage: Dict[str, Dict[PackageName,", "self, source_file: SchemaSourceFile, patch: ParsedNodePatch, ) -> None: if patch.yaml_key", "T): \"\"\"Update dest to overwrite whatever is at dest[new_item.unique_id] with", "1 # is not a big deal at all and", "default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} )", "the Dict[PackageName, UniqueID] will # only ever have exactly one", "self.add_node_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id)", "dbt.contracts.graph.compiled import ( CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode ) from dbt.contracts.graph.parsed", "5 items sample = list(islice(merged, 5)) logger.debug( f'Merged {len(merged)} items", "self.storage[key] = {} self.storage[key][source.package_name] = source.unique_id def populate(self, manifest): for", "def populate(self, manifest): for node in manifest.nodes.values(): 
self.add_node(node) def perform_lookup(", "internal project \"\"\" def filter(candidate: MacroCandidate) -> bool: return candidate.locality", "V_T], old_file: SourceFile, name: str ) -> V_T: if key", "SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_doc(self, source_file:", "self.storage[doc.name][doc.package_name] = doc.unique_id def populate(self, manifest): for doc in manifest.docs.values():", "build_macro_edges(edge_members) return forward_edges def writable_manifest(self): self.build_parent_and_child_maps() return WritableManifest( nodes=self.nodes, sources=self.sources,", "macro exists and emit an error other_path = self.macros[macro.unique_id].original_file_path #", "- return the `generate_{component}_name` macro from the 'dbt' internal project", "model.package_name def search(self, haystack: Iterable[N]) -> Optional[N]: \"\"\"Find an entry", "= 2 @dataclass class MacroCandidate: locality: Locality macro: ParsedMacro def", "def add_disabled_nofile(self, node: CompileResultNode): if node.unique_id in self._disabled: self._disabled[node.unique_id].append(node) else:", "[target_package] elif current_project == node_package: return [current_project, None] else: return", "self, target_model_name: str, target_model_package: Optional[str], current_project: str, node_package: str, )", "def resolve_doc( self, name: str, package: Optional[str], current_project: str, node_package:", "2) class WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID, ManifestNode] = field( metadata=dict(description=( 'The", ") -> Locality: if macro.package_name == root_project_name: return Locality.Root elif", ") -> Optional[ParsedMacro]: \"\"\" The `generate_X_name` macros are similar to", "dbt cannot resolve this ambiguity' .format(self.macro.name, self.macro.package_name, other.macro.package_name) ) return", "dbt.exceptions.InternalException( f'Doc {unique_id} found in cache but not found in", 
"from dbt.ui import line_wrap_message from dbt import flags from dbt", "return model return None D = TypeVar('D') @dataclass class Disabled(Generic[D]):", "perform_lookup( self, unique_id: UniqueID, manifest: 'Manifest' ) -> ParsedSourceDefinition: if", "in self.nodes.items()}, sources={k: _deepcopy(v) for k, v in self.sources.items()}, macros={k:", "if equal: raise_compiler_error( 'Found two materializations with the name {}", "_find_macros_by_name( self, name: str, root_project_name: str, filter: Optional[Callable[[MacroCandidate], bool]] =", "if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges), _sort_values(backward_edges) # Build", "for k, v in self.exposures.items() }, 'nodes': { k: v.to_dict(omit_none=False)", "'source_name'): self.add_source(source) def perform_lookup( self, unique_id: UniqueID, manifest: 'Manifest' )", "field( metadata=dict(description=( 'The sources defined in the dbt project and", "dbt.adapters.factory import get_adapter_package_names candidates: CandidateList = CandidateList() packages = set(get_adapter_package_names(self.metadata.adapter_type))", "project, return it - return the `generate_{component}_name` macro from the", "isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_exposure(self,", "None and source.config.enabled: return source if disabled is None: disabled", "str: raise NotImplementedError('search_name not implemented') N = TypeVar('N', bound=Searchable) @dataclass", "materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, ) return CandidateList( MaterializationCandidate.from_macro(m, specificity) for m", "return self.perform_lookup(unique_id, manifest) return None def add_node(self, node: ManifestNode): if", "def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation): _check_duplicates(doc, self.docs) 
self.docs[doc.unique_id] =", "root project \"\"\" filter: Optional[Callable[[MacroCandidate], bool]] = None if package", "internal_packages: return Locality.Core else: return Locality.Imported class Searchable(Protocol): resource_type: NodeType", "the dbt project and its dependencies' )) ) macros: Mapping[UniqueID,", "def add_source(self, source: ParsedSourceDefinition): key = (source.source_name, source.name) if key", "Optional[ParsedDocumentation] MaybeParsedSource = Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]] MaybeNonSource = Optional[Union[", "self.populate(manifest) def get_unique_id(self, key, package: Optional[PackageName]): return find_unique_id_for_package(self.storage, key, package)", "source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch) def add_macro_patch( self, source_file: SchemaSourceFile, patch: ParsedMacroPatch,", "is not None: return [target_package] elif current_project == node_package: return", "if tracking.active_user is None: return if self.user_id is None: self.user_id", "str @property def search_name(self) -> str: raise NotImplementedError('search_name not implemented')", "lambda x: None, 'deserialize': lambda x: None} ) _source_lookup: Optional[SourceLookup]", "self.source_lookup.find(key, pkg, self) if source is not None and source.config.enabled:", "dbtClassMixin): \"\"\"The manifest for the full graph, after parsing and", "for patch in ' f'file {source_file.path.original_file_path}' ) if unique_id is", "' '{}). 
dbt cannot resolve this ambiguity' .format(self.macro.name, self.macro.package_name, other.macro.package_name)", "source_file: SchemaSourceFile, exposure: ParsedExposure): _check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id)", "Optional[List[CompileResultNode]] = field(metadata=dict( description='A list of the disabled nodes in", "f'macro.{patch.package_name}.{patch.name}' macro = self.macros.get(unique_id) if not macro: warn_or_error( f'WARNING: Found", "that implements the 'in' operator. \"\"\" if model.resource_type not in", "return False if self.locality < other.locality: return True if self.locality", "original file path. \"\"\" unique_id = new_item.unique_id if unique_id not", "None, 'deserialize': lambda x: None} ) _source_lookup: Optional[SourceLookup] = field(", "update_source(self, new_source: ParsedSourceDefinition): _update_into(self.sources, new_source) def build_flat_graph(self): \"\"\"This attribute is", "nodes in the target' )) parent_map: Optional[NodeEdgeMap] = field(metadata=dict( description='A", "TypeVar('N', bound=Searchable) @dataclass class NameSearcher(Generic[N]): name: str package: Optional[str] nodetypes:", "disabled: Optional[List[CompileResultNode]] = field(metadata=dict( description='A list of the disabled nodes", "source_file.file_id if key is None: return False if key not", "k, v in self.files.items()}, state_check=_deepcopy(self.state_check), ) def build_parent_and_child_maps(self): edge_members =", "return True if self.specificity > other.specificity: return False if self.locality", "node_package, None] @dataclass class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for the manifest.\"\"\" dbt_schema_version:", "default_factory=lambda: str(WritableManifest.dbt_schema_version) ) project_id: Optional[str] = field( default=None, metadata={ 'description':", "is configured to send anonymous usage statistics' )), ) adapter_type:", "root project 
name is used to determine priority: - locally", "forward_edges self.parent_map = backward_edges def build_macro_child_map(self): edge_members = list(chain( self.nodes.values(),", "the following macros: - {macro.original_file_path} - {other_path} ''', subtract=2 )", "manifest and return the existing node. \"\"\" with self._lock: existing", "return NotImplemented return self.locality == other.locality def __lt__(self, other: object)", "self, name: str, root_project_name: str, filter: Optional[Callable[[MacroCandidate], bool]] = None", "key not in self.storage: self.storage[key] = {} self.storage[key][source.package_name] = source.unique_id", "have exactly one value, but doing 3 dict lookups instead", "'MaterializationCandidate': return cls( locality=candidate.locality, macro=candidate.macro, specificity=specificity, ) def __eq__(self, other:", "filter(candidate): candidates.append(candidate) return candidates @dataclass class ParsingInfo: static_analysis_parsed_path_count: int =", "selectors=self.selectors, metadata=self.metadata, disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map, ) def write(self, path): self.writable_manifest().write(path)", "sources can't be overwritten! _check_duplicates(source, self.sources) self.sources[source.unique_id] = source #", "and return that. 
If the existing node is compiled, do", "__lt__(self, other: object) -> bool: if not isinstance(other, MacroCandidate): return", "f'got an update_{new_item.resource_type} call with an ' f'unrecognized {new_item.resource_type}: {new_item.unique_id}'", "MacroManifest(MacroMethods): def __init__(self, macros): self.macros = macros self.metadata = ManifestMetadata()", "return candidates.last() def find_generate_macro_by_name( self, component: str, root_project_name: str )", "\"\"\" if model.resource_type not in self.nodetypes: return False if self.name", "\"\"\" unique_id = new_item.unique_id if unique_id not in dest: raise", "not None: return self.perform_lookup(unique_id, manifest) return None def add_node(self, node:", "def update_node(self, new_node: ManifestNode): _update_into(self.nodes, new_node) def update_source(self, new_source: ParsedSourceDefinition):", "def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure): _check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id] =", "if adapter_type is None: specificity = Specificity.Default else: specificity =", "metadata=self.metadata, disabled=[_deepcopy(n) for n in self.disabled], files={k: _deepcopy(v) for k,", "field( metadata=dict(description=( 'The nodes defined in the dbt project and", "NodeType from dbt.ui import line_wrap_message from dbt import flags from", "defined in selectors.yml' )) ) disabled: Optional[List[CompileResultNode]] = field(metadata=dict( description='A", "pkg, self) if result is not None: return result return", "metadata=dict(description=( 'Whether dbt is configured to send anonymous usage statistics'", ") from dbt.contracts.graph.unparsed import SourcePatch from dbt.contracts.files import SourceFile, SchemaSourceFile,", "return False @dataclass class MaterializationCandidate(MacroCandidate): specificity: Specificity @classmethod def from_macro(", "= new_item.unique_id if unique_id not in dest: raise dbt.exceptions.RuntimeException( f'got", "in 
self.storage: self.storage[node.name] = {} self.storage[node.name][node.package_name] = node.unique_id def populate(self,", "__reduce_ex__(self, protocol): args = ( self.nodes, self.sources, self.macros, self.docs, self.exposures,", "None pkg_dct: Mapping[PackageName, UniqueID] = storage[key] if package is None:", "ParseResult def add_macro(self, source_file: SourceFile, macro: ParsedMacro): if macro.unique_id in", "str, target_package: Optional[str] = None, ) -> List[Optional[str]]: if target_package", "of ParsedNodes and return them as two separate dictionaries, each", "Optional[Union[ ManifestNode, Disabled[ManifestNode] ]] T = TypeVar('T', bound=GraphMemberNode) def _update_into(dest:", "to 5 items sample = list(islice(merged, 5)) logger.debug( f'Merged {len(merged)}", "nodes: backward_edges[node.unique_id] = node.depends_on_nodes[:] for unique_id in node.depends_on_nodes: if unique_id", "from_macro( cls, candidate: MacroCandidate, specificity: Specificity ) -> 'MaterializationCandidate': return", "added in the __reduce_ex__ method in the # args tuple", "== other.specificity and self.locality == other.locality ) if equal: raise_compiler_error(", "is None: self.user_id = tracking.active_user.id if self.send_anonymous_usage_stats is None: self.send_anonymous_usage_stats", "> other.specificity: return False if self.locality < other.locality: return True", "str, node_package: str, target_package: Optional[str] = None, ) -> List[Optional[str]]:", "# only ever have exactly one value, but doing 3", "\"\"\" The `generate_X_name` macros are similar to regular ones, but", "(target_source_name, target_table_name) candidates = _search_packages(current_project, node_package) source: Optional[ParsedSourceDefinition] = None", "Optional[UUID] = field( default=None, metadata={ 'description': 'A unique identifier for", ") return candidates.last() def _find_macros_by_name( self, name: str, root_project_name: str,", "not None: return Disabled(disabled) return None # Called by 
dbt.parser.manifest._resolve_sources_for_exposure", "# tuple keys are not supported, so ensure it's empty", "overwritten! _check_duplicates(node, self.nodes) self.nodes[node.unique_id] = node def add_node(self, source_file: AnySourceFile,", "AnySourceFile, node: ManifestNodes, test_from=None): self.add_node_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from", ") -> None: # source patches must be unique key", "@dataclass @schema_version('manifest', 2) class WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID, ManifestNode] = field(", "= self.analysis_lookup.get_unique_id(patch.name, None) else: raise dbt.exceptions.InternalException( f'Unexpected yaml_key {patch.yaml_key} for", "compiled -> must be a NonSourceCompiledNode return cast(NonSourceCompiledNode, existing) _update_into(self.nodes,", "resource_fqns[resource_type_plural] = set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns # This is called", "if node.name not in self.storage: self.storage[node.name] = {} self.storage[node.name][node.package_name] =", "not found' ) return if macro.patch_path: package_name, existing_file_path = macro.patch_path.split('://')", "the ParseResult object source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict) # following", "as two separate dictionaries, each mapping unique IDs to lists", "just-in-time compiling the same ephemeral dependency, and we want them", "them as two separate dictionaries, each mapping unique IDs to", "there is a `generate_{component}_name` macro in the root project, return", "not in self.storage: self.storage[key] = {} self.storage[key][source.package_name] = source.unique_id def", "x: None} ) def __pre_serialize__(self): # serialization won't work with", "both the Manifest # and the MacroManifest class MacroMethods: #", "current.database, current.schema, current.identifier ) ): merged.add(unique_id) self.nodes[unique_id] = node.replace(deferred=True) #", 
"list(chain( self.nodes.values(), self.macros.values(), )) forward_edges = build_macro_edges(edge_members) return forward_edges def", ") project_id: Optional[str] = field( default=None, metadata={ 'description': 'A unique", "manifest.\"\"\" dbt_schema_version: str = field( default_factory=lambda: str(WritableManifest.dbt_schema_version) ) project_id: Optional[str]", "= field( metadata=dict(description=( 'The macros defined in the dbt project", "= Optional[ParsedDocumentation] MaybeParsedSource = Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]] MaybeNonSource =", "# Just to make mypy happy. There must be a", "self.sources[unique_id] elif unique_id in self.exposures: return self.exposures[unique_id] else: # something", "given documentation. This follows the same algorithm as resolve_ref except", "adapter'), ) def __post_init__(self): if tracking.active_user is None: return if", "self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name, # filter out imported packages filter=filter, )", "is when compiling an ephemeral ancestor of a node at", "Optional[str], ) -> CandidateList: if adapter_type is None: specificity =", "self._source_lookup, self._ref_lookup, ) return self.__class__, args class MacroManifest(MacroMethods): def __init__(self,", "overwritten node = self.nodes.get(unique_id) if node: if node.patch_path: package_name, existing_file_path", "\"{}\" in cached \"result.{}\" based ' 'on cached file information:", "lambda x: None, 'deserialize': lambda x: None} ) _parsing_info: ParsingInfo", "target_model_name: str, target_model_package: Optional[str], current_project: str, node_package: str, ) ->", "that the macro exists and emit an error other_path =", "\"\"\"Resolve the given documentation. 
This follows the same algorithm as", "locality: Locality macro: ParsedMacro def __eq__(self, other: object) -> bool:", "macros): self.macros = macros self.metadata = ManifestMetadata() # This is", "= TypeVar('M', bound=MacroCandidate) class CandidateList(List[M]): def last(self) -> Optional[ParsedMacro]: if", "target_model_package ) for pkg in candidates: node = self.ref_lookup.find(target_model_name, pkg,", "self.package is None or self.package == model.package_name def search(self, haystack:", "deepcopy(self): return Manifest( nodes={k: _deepcopy(v) for k, v in self.nodes.items()},", "happy. There must be a better way. def __init__(self): self.macros", "is a `generate_{component}_name` macro in the root project, return it", "_deepcopy(v) for k, v in self.exposures.items()}, selectors={k: _deepcopy(v) for k,", "macros by their name. \"\"\" # avoid an import cycle", "field(default_factory=dict) selectors: MutableMapping[str, Any] = field(default_factory=dict) disabled: List[CompileResultNode] = field(default_factory=list)", "if node.resource_type in self._lookup_types: if node.name not in self.storage: self.storage[node.name]", "the dbt project and its dependencies' )) ) docs: Mapping[UniqueID,", "defined in the root project \"\"\" filter: Optional[Callable[[MacroCandidate], bool]] =", "source_file.macro_patches[patch.name] = unique_id macro.patch(patch) def add_source_patch( self, source_file: SchemaSourceFile, patch:", "list(islice(merged, 5)) logger.debug( f'Merged {len(merged)} items from state (sample: {sample})'", "in src: raise_duplicate_resource_name(value, src[value.unique_id]) K_T = TypeVar('K_T') V_T = TypeVar('V_T')", "source_file.nodes.append(node.unique_id) def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation): _check_duplicates(doc, self.docs) self.docs[doc.unique_id]", "__init__(self, macros): self.macros = macros self.metadata = ManifestMetadata() # This", "# patches can't be overwritten node = self.nodes.get(unique_id) if node:", 
"big deal at all and retains consistency def __init__(self, manifest:", "CandidateList: \"\"\"Find macros by their name. \"\"\" # avoid an", "str, root_project_name: str, filter: Optional[Callable[[MacroCandidate], bool]] = None ) ->", "algorithm as resolve_ref except the is_enabled checks are unnecessary as", "if node is not None and node.config.enabled: return node #", "= node.replace(deferred=True) # log up to 5 items sample =", "name: str, package: Optional[str], current_project: str, node_package: str, ) ->", "SchemaSourceFile, exposure: ParsedExposure): _check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id) def", "source: UnpatchedSourceDefinition ): # sources can't be overwritten! _check_duplicates(source, self.sources)", "return [current_project, node_package, None] @dataclass class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for the", "the manifest', )) def _check_duplicates( value: HasUniqueID, src: Mapping[str, HasUniqueID]", "name: str, root_project_name: str, filter: Optional[Callable[[MacroCandidate], bool]] = None )", "SchemaSourceFile, FileHash, AnySourceFile from dbt.contracts.util import ( BaseArtifactMetadata, SourceKey, ArtifactMixin,", "v.to_dict(omit_none=False) for k, v in self.sources.items() } } def find_disabled_by_name(", "test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_doc(self, source_file: SourceFile, doc:", "construct a Manifest. 
def __reduce_ex__(self, protocol): args = ( self.nodes,", "dbt.parser.manifest._process_source_for_node def resolve_source( self, target_source_name: str, target_table_name: str, current_project: str,", "import Lock from typing import ( Dict, List, Optional, Union,", "is not None: return self.perform_lookup(unique_id, manifest) return None def add_node(self,", "if self._ref_lookup is None: self._ref_lookup = RefableLookup(self) return self._ref_lookup def", "field( default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}", "dbt.exceptions.InternalException( 'Expected node {} not found in manifest'.format(unique_id) ) @property", "# is added it must all be added in the", "SourceFile, doc: ParsedDocumentation): _check_duplicates(doc, self.docs) self.docs[doc.unique_id] = doc source_file.docs.append(doc.unique_id) #", "in ParseResult # Provide support for copy.deepcopy() - we just", "forward edge dict for simplicity forward_edges: Dict[str, List[str]] = {n.unique_id:", "note that the line wrap eats newlines, so if you", "None: return if self.user_id is None: self.user_id = tracking.active_user.id if", "dbt found two macros named \"{macro.name}\" in the project \"{macro.package_name}\".", "in the dbt project and its dependencies' )) ) selectors:", "model in haystack: if self._matches(model): return model return None D", "node is not compiled, update it with the new node", "return candidates @dataclass class ParsingInfo: static_analysis_parsed_path_count: int = 0 static_analysis_path_count:", "raise CompilationException( 'Expected to find \"{}\" in cached \"result.{}\" based", "dbt.contracts.graph.unparsed import SourcePatch from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile", "Specificity ) -> 'MaterializationCandidate': return cls( locality=candidate.locality, macro=candidate.macro, specificity=specificity, )", "be overwritten! 
_check_duplicates(source, self.sources) self.sources[source.unique_id] = source # type: ignore", "name=name, root_project_name=root_project_name, filter=filter, ) return candidates.last() def find_generate_macro_by_name( self, component:", "FileHash = field(default_factory=FileHash.empty) profile_hash: FileHash = field(default_factory=FileHash.empty) project_hashes: MutableMapping[str, FileHash]", "find(self, key, package: Optional[PackageName], manifest: 'Manifest'): unique_id = self.get_unique_id(key, package)", "doc: ParsedDocumentation): if doc.name not in self.storage: self.storage[doc.name] = {}", "in manifest.nodes: raise dbt.exceptions.InternalException( f'Node {unique_id} found in cache but", "and deepcopy use this. It returns a callable object used", "type name of the adapter'), ) def __post_init__(self): if tracking.active_user", "dictionary, sort each value. This makes output deterministic, which helps", "manifest' ) return manifest.nodes[unique_id] class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] = set(NodeType.Analysis)", "analysis_lookup(self) -> AnalysisLookup: if self._analysis_lookup is None: self._analysis_lookup = AnalysisLookup(self)", "add_source(self, source: ParsedSourceDefinition): key = (source.source_name, source.name) if key not", "return forward_edges def writable_manifest(self): self.build_parent_and_child_maps() return WritableManifest( nodes=self.nodes, sources=self.sources, macros=self.macros,", "# and the MacroManifest class MacroMethods: # Just to make", "line_wrap_message from dbt import flags from dbt import tracking import", "specificity: Specificity @classmethod def from_macro( cls, candidate: MacroCandidate, specificity: Specificity", "must have the same original file path. 
\"\"\" unique_id =", "self._ref_lookup is None: self._ref_lookup = RefableLookup(self) return self._ref_lookup def rebuild_ref_lookup(self):", "): if value.unique_id in src: raise_duplicate_resource_name(value, src[value.unique_id]) K_T = TypeVar('K_T')", "current_project == node_package: return [current_project, None] else: return [current_project, node_package,", "in manifest.sources.values(): if hasattr(source, 'source_name'): self.add_source(source) def perform_lookup( self, unique_id:", "= field(default_factory=dict) exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict) selectors: MutableMapping[str, Any]", "\"\"\"Build the forward and backward edges on the given list", "return value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum): Core = 1 Imported = 2", "metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} ) _analysis_lookup:", "node. \"\"\" with self._lock: existing = self.nodes[new_node.unique_id] if getattr(existing, 'compiled',", "( ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure, HasUniqueID, UnpatchedSourceDefinition, ManifestNodes", "to lists of edges. \"\"\" backward_edges: Dict[str, List[str]] = {}", "are both positional and by keyword. If an attribute #", "# following is from ParseResult _disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict)", "package is None: if not pkg_dct: return None else: return", "resource_type_plural = resource.resource_type.pluralize() if resource_type_plural not in resource_fqns: resource_fqns[resource_type_plural] =", "MacroMethods: # Just to make mypy happy. 
There must be", "equal = ( self.specificity == other.specificity and self.locality == other.locality", "bound=GraphMemberNode) def _update_into(dest: MutableMapping[str, T], new_item: T): \"\"\"Update dest to", "documentation for macro \"{patch.name}\" ' f'which was not found' )", "None: # macros are fully namespaced unique_id = f'macro.{patch.package_name}.{patch.name}' macro", "new_source: ParsedSourceDefinition): _update_into(self.sources, new_source) def build_flat_graph(self): \"\"\"This attribute is used", "N) -> bool: \"\"\"Return True if the model matches the", "target_table_name: str, current_project: str, node_package: str ) -> MaybeParsedSource: key", "dest[new_item.unique_id] with new_itme. There must be an existing value to", "package: Optional[str] = None ) -> Optional[ParsedSourceDefinition]: search_name = f'{source_name}.{table_name}'", "node is not None and node.config.enabled: return node # it's", "an existing value to overwrite, and they two nodes must", "field(default_factory=dict) _doc_lookup: Optional[DocLookup] = field( default=None, metadata={'serialize': lambda x: None,", "'deserialize': lambda x: None} ) _analysis_lookup: Optional[AnalysisLookup] = field( default=None,", "concurrency issues around it. Make sure you don't call this", "search_name = f'{source_name}.{table_name}' searcher: NameSearcher = NameSearcher( search_name, package, [NodeType.Source]", "following macros: - {macro.original_file_path} - {other_path} ''', subtract=2 ) raise_compiler_error(msg)", "name: str package: Optional[str] nodetypes: List[NodeType] def _matches(self, model: N)", "node_package: str, ) -> Optional[ParsedDocumentation]: \"\"\"Resolve the given documentation. 
This", "< other.locality: return True if self.locality > other.locality: return False", "= 1 Imported = 2 Root = 3 class Specificity(enum.IntEnum):", "searcher: NameSearcher = NameSearcher( name, package, NodeType.refable() ) result =", "There must be an existing value to overwrite, and they", "source_file: SourceFile) -> bool: key = source_file.file_id if key is", "time we should want to lock is when compiling an", "in the right position. nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict) sources:", "SourcePatch, ) -> None: # source patches must be unique", "hasattr(source, 'source_name'): self.add_source(source) def perform_lookup( self, unique_id: UniqueID, manifest: 'Manifest'", "in self.exposures: return self.exposures[unique_id] else: # something terrible has happened", "package: Optional[str] nodetypes: List[NodeType] def _matches(self, model: N) -> bool:", "class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for the manifest.\"\"\" dbt_schema_version: str = field(", "edge dict for simplicity forward_edges: Dict[str, List[str]] = {n.unique_id: []", "self.macros.values(), )) forward_edges = build_macro_edges(edge_members) return forward_edges def writable_manifest(self): self.build_parent_and_child_maps()", "in manifest' ) return manifest.nodes[unique_id] class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] =", "= build_node_edges(edge_members) self.child_map = forward_edges self.parent_map = backward_edges def build_macro_child_map(self):", "def write(self, path): self.writable_manifest().write(path) # Called in dbt.compilation.Linker.write_graph and #", "node in other.nodes.items(): current = self.nodes.get(unique_id) if current and (", "in all_resources: resource_type_plural = resource.resource_type.pluralize() if resource_type_plural not in resource_fqns:", "key in self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key] = patch 
source_file.source_patches.append(key) def", "to make mypy happy. There must be a better way.", "Optional[AnalysisLookup] = field( default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda", "self._find_macros_by_name(full_name, project_name) ) def find_materialization_macro_by_name( self, project_name: str, materialization_name: str,", "= {} def find_macro_by_name( self, name: str, root_project_name: str, package:", "unique_id: str) -> GraphMemberNode: if unique_id in self.nodes: return self.nodes[unique_id]", "field( default=None, metadata=dict(description='The type name of the adapter'), ) def", "resolve_ref( self, target_model_name: str, target_model_package: Optional[str], current_project: str, node_package: str,", "= backward_edges def build_macro_child_map(self): edge_members = list(chain( self.nodes.values(), self.macros.values(), ))", "want them to have a consistent view of the manifest.", "\"{macro.name}\" in the project \"{macro.package_name}\". To fix this error, rename", "unique_id not in manifest.sources: raise dbt.exceptions.InternalException( f'Source {unique_id} found in", "k, v in self.exposures.items()}, selectors={k: _deepcopy(v) for k, v in", ") def write(self, path): self.writable_manifest().write(path) # Called in dbt.compilation.Linker.write_graph and", "self.sources: return self.sources[unique_id] elif unique_id in self.exposures: return self.exposures[unique_id] else:", "dbtClassMixin from dbt.exceptions import ( CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name,", "newlines, so if you want newlines, # this is the", "def _get_locality( macro: ParsedMacro, root_project_name: str, internal_packages: Set[str] ) ->", "the node with a lock. 
The only time we should", "manifest.docs: raise dbt.exceptions.InternalException( f'Doc {unique_id} found in cache but not", "items sample = list(islice(merged, 5)) logger.debug( f'Merged {len(merged)} items from", "be unique key = (patch.overrides, patch.name) if key in self.source_patches:", "are actually unique, so the Dict[PackageName, UniqueID] will # only", "imported packages filter=filter, ) return candidates.last() def _find_macros_by_name( self, name:", "'deserialize': lambda x: None} ) _lock: Lock = field( default_factory=flags.MP_CONTEXT.Lock,", "else: return [current_project, node_package, None] @dataclass class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata for", "return None self.sort() return self[-1].macro def _get_locality( macro: ParsedMacro, root_project_name:", "= None disabled: Optional[ManifestNode] = None candidates = _search_packages( current_project,", "args = ( self.nodes, self.sources, self.macros, self.docs, self.exposures, self.selectors, self.disabled,", "str, ) -> Optional[ParsedDocumentation]: \"\"\"Resolve the given documentation. 
This follows", "\"\"\"Given the selected unique IDs and a writable manifest, update", "GLOBAL_LOGGER as logger from dbt.node_types import NodeType from dbt.ui import", "for node in manifest.nodes.values(): self.add_node(node) def perform_lookup( self, unique_id: UniqueID,", "RefableLookup(self) @property def analysis_lookup(self) -> AnalysisLookup: if self._analysis_lookup is None:", "defined macros come first - then imported macros - then", "-> None: # macros are fully namespaced unique_id = f'macro.{patch.package_name}.{patch.name}'", "from dbt.contracts.graph.parsed import ( ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure,", "int = 0 @dataclass class ManifestStateCheck(dbtClassMixin): vars_hash: FileHash = field(default_factory=FileHash.empty)", "for x in chain(self.nodes.values(), self.sources.values()) ) # This is used", "value.unique_id in src: raise_duplicate_resource_name(value, src[value.unique_id]) K_T = TypeVar('K_T') V_T =", "self.sources.values()) for resource in all_resources: resource_type_plural = resource.resource_type.pluralize() if resource_type_plural", "and source.config.enabled: return source if disabled is None: disabled =", "as resolve_ref except the is_enabled checks are unnecessary as docs", "self.child_map = forward_edges self.parent_map = backward_edges def build_macro_child_map(self): edge_members =", "find \"{}\" in cached \"result.{}\" based ' 'on cached file", "searcher.search(self.disabled) return result def find_disabled_source_by_name( self, source_name: str, table_name: str,", "unique_id: UniqueID, manifest ) -> ManifestNode: if unique_id not in", "@property def doc_lookup(self) -> DocLookup: if self._doc_lookup is None: self._doc_lookup", ")) ) disabled: Optional[List[CompileResultNode]] = field(metadata=dict( description='A list of the", "always enabled. 
\"\"\" candidates = _search_packages( current_project, node_package, package )", "return [target_package] elif current_project == node_package: return [current_project, None] else:", "Lock = field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda x: None, 'deserialize': lambda", "self, name: str, package: Optional[str], current_project: str, node_package: str, )", "graph, after parsing and during compilation. \"\"\" # These attributes", "-> Optional[ParsedMacro]: candidates: CandidateList = CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name, adapter_type=atype,", "ParsedNodePatch, ) -> None: if patch.yaml_key in ['models', 'seeds', 'snapshots']:", "= self._find_macros_by_name( name=f'generate_{component}_name', root_project_name=root_project_name, # filter out imported packages filter=filter,", "Manifest( nodes={k: _deepcopy(v) for k, v in self.nodes.items()}, sources={k: _deepcopy(v)", ") selectors: Mapping[UniqueID, Any] = field( metadata=dict(description=( 'The selectors defined", "]] T = TypeVar('T', bound=GraphMemberNode) def _update_into(dest: MutableMapping[str, T], new_item:", "return None def add_doc(self, doc: ParsedDocumentation): if doc.name not in", "if not resource_types or node.resource_type in resource_types }) def get_used_databases(self):", "if self._doc_lookup is None: self._doc_lookup = DocLookup(self) return self._doc_lookup def", "T], new_item: T): \"\"\"Update dest to overwrite whatever is at", "False): # already compiled -> must be a NonSourceCompiledNode return", "in both the Manifest # and the MacroManifest class MacroMethods:", "ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure, HasUniqueID, UnpatchedSourceDefinition, ManifestNodes )", "NonSourceCompiledNode return cast(NonSourceCompiledNode, existing) _update_into(self.nodes, new_node) return new_node def 
update_exposure(self,", "in the NodePatchParser def add_patch( self, source_file: SchemaSourceFile, patch: ParsedNodePatch,", "other.locality: return False return False M = TypeVar('M', bound=MacroCandidate) class", "Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar", "_update_into(self.exposures, new_exposure) def update_node(self, new_node: ManifestNode): _update_into(self.nodes, new_node) def update_source(self,", "that were formerly in ParseResult def add_macro(self, source_file: SourceFile, macro:", "= { n.unique_id: [] for n in nodes if n.unique_id.startswith('macro')", "allowed. nodetypes should be a container of NodeTypes that implements", ")) forward_edges = build_macro_edges(edge_members) return forward_edges def writable_manifest(self): self.build_parent_and_child_maps() return", "the disabled nodes in the target' )) parent_map: Optional[NodeEdgeMap] =", "-> None: if patch.yaml_key in ['models', 'seeds', 'snapshots']: unique_id =", "raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name] = unique_id macro.patch(patch) def add_source_patch( self, source_file:", "around it. Make sure you don't call this until you're", "must be a better way. def __init__(self): self.macros = []", "Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar )", "{} self.populate(manifest) def get_unique_id(self, key, package: Optional[PackageName]): return find_unique_id_for_package(self.storage, key,", "tracking.active_user.do_not_track ) @classmethod def default(cls): return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), ) def", "fully namespaced unique_id = f'macro.{patch.package_name}.{patch.name}' macro = self.macros.get(unique_id) if not", "rebuild_ref_lookup(self): self._ref_lookup = RefableLookup(self) @property def analysis_lookup(self) -> AnalysisLookup: if", "checks are unnecessary as docs are always enabled. 
\"\"\" candidates", "= 2 Root = 3 class Specificity(enum.IntEnum): Default = 1", "in nodes if n.unique_id.startswith('macro') or n.depends_on.macros } for node in", "if doc.name not in self.storage: self.storage[doc.name] = {} self.storage[doc.name][doc.package_name] =", "so the Dict[PackageName, UniqueID] will # only ever have exactly", "return False return self.package is None or self.package == model.package_name", "def filter(candidate: MacroCandidate) -> bool: return package == candidate.macro.package_name candidates:", "= self._find_macros_by_name( name=name, root_project_name=root_project_name, filter=filter, ) return candidates.last() def find_generate_macro_by_name(", "manifest) return None def add_source(self, source: ParsedSourceDefinition): key = (source.source_name,", "must match the order of the attributes # in the", "= list(chain( self.nodes.values(), self.sources.values(), self.exposures.values(), )) forward_edges, backward_edges = build_node_edges(edge_members)", "CandidateList = self._find_macros_by_name( name=name, root_project_name=root_project_name, filter=filter, ) return candidates.last() def", "= None for pkg in candidates: source = self.source_lookup.find(key, pkg,", "disabled is None: disabled = self.find_disabled_source_by_name( target_source_name, target_table_name, pkg )", "return False return False M = TypeVar('M', bound=MacroCandidate) class CandidateList(List[M]):", "Optional[Callable[[MacroCandidate], bool]] = None ) -> CandidateList: \"\"\"Find macros by", "Set[Tuple[str, ...]]] = {} all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values()) for", "return self.locality == other.locality def __lt__(self, other: object) -> bool:", "have a consistent view of the manifest. If the existing", "makes output deterministic, which helps for tests. 
\"\"\" return {k:", "self.sources.values(), self.exposures.values(), )) forward_edges, backward_edges = build_node_edges(edge_members) self.child_map = forward_edges", "if disabled is None: disabled = self.find_disabled_source_by_name( target_source_name, target_table_name, pkg", "package ) for pkg in candidates: result = self.doc_lookup.find(name, pkg,", "exposure: ParsedExposure): _check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self,", "other: object) -> bool: if not isinstance(other, MaterializationCandidate): return NotImplemented", "if self.specificity < other.specificity: return True if self.specificity > other.specificity:", "by each node, so we want to only build it", "str ) -> V_T: if key not in src: raise", "= RefableLookup(self) return self._ref_lookup def rebuild_ref_lookup(self): self._ref_lookup = RefableLookup(self) @property", "raise_compiler_error( 'Found two materializations with the name {} (packages {}", "and its dependencies' )) ) docs: Mapping[UniqueID, ParsedDocumentation] = field(", "current and ( node.resource_type in refables and not node.is_ephemeral and", "all and retains consistency def __init__(self, manifest: 'Manifest'): self.storage: Dict[str,", "self.nodes.items() }, 'sources': { k: v.to_dict(omit_none=False) for k, v in", "None: self._ref_lookup = RefableLookup(self) return self._ref_lookup def rebuild_ref_lookup(self): self._ref_lookup =", "in nodes: for unique_id in node.depends_on.macros: if unique_id in forward_edges.keys():", "# log up to 5 items sample = list(islice(merged, 5))", "of the following macros: - {macro.original_file_path} - {other_path} ''', subtract=2", "k: v.to_dict(omit_none=False) for k, v in self.exposures.items() }, 'nodes': {", "target_package: Optional[str] = None, ) -> List[Optional[str]]: if target_package is", "False return self.package is None or self.package == model.package_name def", 
"ParsedExposure, HasUniqueID, UnpatchedSourceDefinition, ManifestNodes ) from dbt.contracts.graph.unparsed import SourcePatch from", "node: ManifestNode): if node.resource_type in self._lookup_types: if node.name not in", "class ParsingInfo: static_analysis_parsed_path_count: int = 0 static_analysis_path_count: int = 0", "def __eq__(self, other: object) -> bool: if not isinstance(other, MacroCandidate):", "{unique_id} found in cache but not found in manifest' )", "None, 'deserialize': lambda x: None} ) _parsing_info: ParsingInfo = field(", "model matches the given name, package, and type. If package", "n.depends_on.macros } for node in nodes: for unique_id in node.depends_on.macros:", "if not isinstance(other, MaterializationCandidate): return NotImplemented equal = ( self.specificity", "key, package: Optional[PackageName]): if key not in storage: return None", "self.specificity > other.specificity: return False if self.locality < other.locality: return", "if not self: return None self.sort() return self[-1].macro def _get_locality(", "= searcher.search(self.disabled) if result is not None: assert isinstance(result, ParsedSourceDefinition)", "exactly one value, but doing 3 dict lookups instead of", "self, new_node: NonSourceCompiledNode ) -> NonSourceCompiledNode: \"\"\"update the node with", "patch: ParsedNodePatch, ) -> None: if patch.yaml_key in ['models', 'seeds',", "self.exposures) self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self, node: CompileResultNode): if", "tuple keys are not supported, so ensure it's empty self.source_patches", "add_disabled_nofile(self, node: CompileResultNode): if node.unique_id in self._disabled: self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id]", "the Manifest class declaration, because they are used as #", "@schema_version('manifest', 2) class WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID, ManifestNode] = field( 
metadata=dict(description=(", "!= existing.original_file_path: raise dbt.exceptions.RuntimeException( f'cannot update a {new_item.resource_type} to have", "`generate_{component}_name` macro in the root project, return it - return", "def _sort_values(dct): \"\"\"Given a dictionary, sort each value. This makes", "return None # Called by DocsRuntimeContext.doc def resolve_doc( self, name:", "'deserialize': lambda x: None} ) _source_lookup: Optional[SourceLookup] = field( default=None,", "CompilationException( 'Expected to find \"{}\" in cached \"result.{}\" based '", "they are used as # positional arguments to construct a", "description='A mapping from child nodes to their dependencies', )) child_map:", "CandidateList() packages = set(get_adapter_package_names(self.metadata.adapter_type)) for unique_id, macro in self.macros.items(): if", "node.config.enabled: return node # it's possible that the node is", "field(default_factory=dict) docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict) exposures: MutableMapping[str, ParsedExposure] =", "self.nodes: return self.nodes[unique_id] elif unique_id in self.sources: return self.sources[unique_id] elif", "None: self.send_anonymous_usage_stats = ( not tracking.active_user.do_not_track ) @classmethod def default(cls):", "deal at all and retains consistency def __init__(self, manifest: 'Manifest'):", "dependencies' )) ) macros: Mapping[UniqueID, ParsedMacro] = field( metadata=dict(description=( 'The", "Optional[str] = None, ) -> List[Optional[str]]: if target_package is not", "-> Optional[ParsedMacro]: \"\"\"Find a macro in the graph by its", "existing node is not compiled, update it with the new", "= node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key, unique_id) node.patch(patch) def add_macro_patch( self,", "macro source_file.macros.append(macro.unique_id) def has_file(self, source_file: SourceFile) -> bool: key =", "= 
(patch.overrides, patch.name) if key in self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key]", "-> Optional[ManifestNode]: searcher: NameSearcher = NameSearcher( name, package, NodeType.refable() )", "package. The root project name is used to determine priority:", "root_project_name=root_project_name, filter=filter, ) return candidates.last() def find_generate_macro_by_name( self, component: str,", "adapter_type=atype, ) for atype in (adapter_type, None) )) return candidates.last()", "CandidateList(chain.from_iterable( self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name, adapter_type=atype, ) for atype in (adapter_type,", "def source_lookup(self) -> SourceLookup: if self._source_lookup is None: self._source_lookup =", "Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable,", "are similar to regular ones, but ignore imported packages. -", "self.metadata = ManifestMetadata() # This is returned by the 'graph'", "in ['models', 'seeds', 'snapshots']: unique_id = self.ref_lookup.get_unique_id(patch.name, None) elif patch.yaml_key", "= None disabled: Optional[ParsedSourceDefinition] = None for pkg in candidates:", "if hasattr(source, 'source_name'): self.add_source(source) def perform_lookup( self, unique_id: UniqueID, manifest:", "are used as # positional arguments to construct a Manifest.", "# dbt.graph.queue.get and ._include_in_cost def expect(self, unique_id: str) -> GraphMemberNode:", "for macro \"{patch.name}\" ' f'which was not found' ) return", "called by 'parse_patch' in the NodePatchParser def add_patch( self, source_file:", "\"\"\"Return True if the model matches the given name, package,", "project_name: str, materialization_name: str, adapter_type: Optional[str], ) -> CandidateList: if", ") _lock: Lock = field( default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda x: None,", "(patch.overrides, 
patch.name) if key in self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key]) self.source_patches[key] =", "new_item: T): \"\"\"Update dest to overwrite whatever is at dest[new_item.unique_id]", "unique_id not in dest: raise dbt.exceptions.RuntimeException( f'got an update_{new_item.resource_type} call", "= field( default=None, metadata=dict(description=( 'Whether dbt is configured to send", "Disabled[ParsedSourceDefinition], ]] MaybeNonSource = Optional[Union[ ManifestNode, Disabled[ManifestNode] ]] T =", "The root project name is used to determine priority: -", "in the __reduce_ex__ method in the # args tuple in", "not node.is_ephemeral and unique_id not in selected and not adapter.get_relation(", "the ProviderContext class. self.flat_graph = {} AnyManifest = Union[Manifest, MacroManifest]", "docs: Mapping[UniqueID, ParsedDocumentation] = field( metadata=dict(description=( 'The docs defined in", "metadata={ 'description': 'A unique identifier for the project', }, )", "\"\"\"Update dest to overwrite whatever is at dest[new_item.unique_id] with new_itme.", "v in self.sources.items()}, macros={k: _deepcopy(v) for k, v in self.macros.items()},", "unique_id not in manifest.docs: raise dbt.exceptions.InternalException( f'Doc {unique_id} found in", "macros: - {macro.original_file_path} - {other_path} ''', subtract=2 ) raise_compiler_error(msg) self.macros[macro.unique_id]", "macros=self.macros, docs=self.docs, exposures=self.exposures, selectors=self.selectors, metadata=self.metadata, disabled=self.disabled, child_map=self.child_map, parent_map=self.parent_map, ) def", "want to lock is when compiling an ephemeral ancestor of", "f'Unexpected yaml_key {patch.yaml_key} for patch in ' f'file {source_file.path.original_file_path}' )", "def _matches(self, model: N) -> bool: \"\"\"Return True if the", "by keyword. If an attribute # is added it must", "positional arguments to construct a Manifest. def __reduce_ex__(self, protocol): args", "class. 
self.flat_graph = {} AnyManifest = Union[Manifest, MacroManifest] @dataclass @schema_version('manifest',", "= node def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None): self.add_node_nofile(node)", "dbt.parser.manifest._process_refs_for_node def resolve_ref( self, target_model_name: str, target_model_package: Optional[str], current_project: str,", "pkg_dct: return None else: return next(iter(pkg_dct.values())) elif package in pkg_dct:", "the given iterable by name.\"\"\" for model in haystack: if", "\"\"\"Find a macro in the graph by its name and", "k, v in dct.items()} def build_node_edges(nodes: List[ManifestNode]): \"\"\"Build the forward", "def find_macro_by_name( self, name: str, root_project_name: str, package: Optional[str] )", "'dbt' internal project \"\"\" def filter(candidate: MacroCandidate) -> bool: return", "other.locality ) if equal: raise_compiler_error( 'Found two materializations with the", "' f'unrecognized {new_item.resource_type}: {new_item.unique_id}' ) existing = dest[unique_id] if new_item.original_file_path", "except an empty source_patches because # tuple keys are not", "sources defined in the dbt project and its dependencies' ))", "bound=Searchable) @dataclass class NameSearcher(Generic[N]): name: str package: Optional[str] nodetypes: List[NodeType]", "must be a NonSourceCompiledNode return cast(NonSourceCompiledNode, existing) _update_into(self.nodes, new_node) return", "your manifest! \"\"\" self.flat_graph = { 'exposures': { k: v.to_dict(omit_none=False)", "examined. 
\"\"\" refables = set(NodeType.refable()) merged = set() for unique_id,", "bound=MacroCandidate) class CandidateList(List[M]): def last(self) -> Optional[ParsedMacro]: if not self:", "for unique_id in node.depends_on_nodes: if unique_id in forward_edges.keys(): forward_edges[unique_id].append(node.unique_id) return", "self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) def get_unique_id(self, key,", "-> bool: if not isinstance(other, MaterializationCandidate): return NotImplemented if self.specificity", "unique_id, macro in self.macros.items(): if macro.name != name: continue candidate", "None or filter(candidate): candidates.append(candidate) return candidates @dataclass class ParsingInfo: static_analysis_parsed_path_count:", "{} and ' '{}). dbt cannot resolve this ambiguity' .format(self.macro.name,", "an ephemeral ancestor of a node at runtime, because multiple", "named \"{macro.name}\" in the project \"{macro.package_name}\". To fix this error,", "= Optional[Union[ ManifestNode, Disabled[ManifestNode] ]] T = TypeVar('T', bound=GraphMemberNode) def", "__eq__(self, other: object) -> bool: if not isinstance(other, MaterializationCandidate): return", "Locality.Core else: return Locality.Imported class Searchable(Protocol): resource_type: NodeType package_name: str", "_get_locality( macro: ParsedMacro, root_project_name: str, internal_packages: Set[str] ) -> Locality:", "is not None: return self.perform_lookup(unique_id, manifest) return None def add_source(self,", "if self.user_id is None: self.user_id = tracking.active_user.id if self.send_anonymous_usage_stats is", "forward_edges[unique_id].append(node.unique_id) return _sort_values(forward_edges), _sort_values(backward_edges) # Build a map of children", "ParsedMacroPatch, ) -> None: # macros are fully namespaced unique_id", "but not found in manifest' ) return manifest.sources[unique_id] class RefableLookup(dbtClassMixin):", "'Manifest'): unique_id = self.get_unique_id(key, package) 
if unique_id is not None:", "2 @dataclass class MacroCandidate: locality: Locality macro: ParsedMacro def __eq__(self,", "None] else: return [current_project, node_package, None] @dataclass class ManifestMetadata(BaseArtifactMetadata): \"\"\"Metadata", "is not None: assert isinstance(result, ParsedSourceDefinition) return result def _materialization_candidates_for(", "self.perform_lookup(unique_id, manifest) return None def add_doc(self, doc: ParsedDocumentation): if doc.name", "target_table_name) candidates = _search_packages(current_project, node_package) source: Optional[ParsedSourceDefinition] = None disabled:", "follows the same algorithm as resolve_ref except the is_enabled checks", "'in' operator. \"\"\" if model.resource_type not in self.nodetypes: return False", "self.storage: self.storage[node.name] = {} self.storage[node.name][node.package_name] = node.unique_id def populate(self, manifest):", "candidates = _search_packages(current_project, node_package) source: Optional[ParsedSourceDefinition] = None disabled: Optional[ParsedSourceDefinition]", "should want to lock is when compiling an ephemeral ancestor", "test_from) else: source_file.nodes.append(node.unique_id) def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure): _check_duplicates(exposure,", "self, unique_id: UniqueID, manifest: 'Manifest' ) -> ParsedSourceDefinition: if unique_id", "the graph by its name and package name, or None", "This is used in dbt.task.rpc.sql_commands 'add_new_refs' def deepcopy(self): return Manifest(", "isinstance(other, MaterializationCandidate): return NotImplemented if self.specificity < other.specificity: return True", "macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict) docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)", "populate(self, manifest): for doc in manifest.docs.values(): self.add_doc(doc) def perform_lookup( self,", "return if self.user_id is None: self.user_id = 
tracking.active_user.id if self.send_anonymous_usage_stats", "unique_id macro.patch(patch) def add_source_patch( self, source_file: SchemaSourceFile, patch: SourcePatch, )", "set(NodeType.refable()) merged = set() for unique_id, node in other.nodes.items(): current", "str, node_package: str, ) -> Optional[ParsedDocumentation]: \"\"\"Resolve the given documentation.", "and dbt.parser.manifest._process_refs_for_node def resolve_ref( self, target_model_name: str, target_model_package: Optional[str], current_project:", "in self.macros.items(): if macro.name != name: continue candidate = MacroCandidate(", "self.disabled], files={k: _deepcopy(v) for k, v in self.files.items()}, state_check=_deepcopy(self.state_check), )", "if disabled is None: disabled = self.find_disabled_by_name( target_model_name, pkg )", "by their name. \"\"\" # avoid an import cycle from", "self.metadata = {} def find_macro_by_name( self, name: str, root_project_name: str,", "self.storage[node.name] = {} self.storage[node.name][node.package_name] = node.unique_id def populate(self, manifest): for", "node_package: str ) -> MaybeParsedSource: key = (target_source_name, target_table_name) candidates", "node at runtime, because multiple threads could be just-in-time compiling", "by DocsRuntimeContext.doc def resolve_doc( self, name: str, package: Optional[str], current_project:", "other.specificity: return False if self.locality < other.locality: return True if", "anonymous usage statistics' )), ) adapter_type: Optional[str] = field( default=None,", "MacroCandidate) -> bool: return candidate.locality != Locality.Imported candidates: CandidateList =", "backward_edges def build_macro_child_map(self): edge_members = list(chain( self.nodes.values(), self.macros.values(), )) forward_edges", ") existing = dest[unique_id] if new_item.original_file_path != existing.original_file_path: raise dbt.exceptions.RuntimeException(", "TypeVar('M', bound=MacroCandidate) class CandidateList(List[M]): def last(self) -> 
Optional[ParsedMacro]: if not", "node.resource_type in refables and not node.is_ephemeral and unique_id not in", "project and its dependencies' )) ) docs: Mapping[UniqueID, ParsedDocumentation] =", "dict lookups instead of 1 # is not a big", "v.to_dict(omit_none=False) for k, v in self.exposures.items() }, 'nodes': { k:", "0 static_analysis_path_count: int = 0 @dataclass class ManifestStateCheck(dbtClassMixin): vars_hash: FileHash", "if key not in self.storage: self.storage[key] = {} self.storage[key][source.package_name] =", "self.flat_graph = {} AnyManifest = Union[Manifest, MacroManifest] @dataclass @schema_version('manifest', 2)", "typing_extensions import Protocol from uuid import UUID from dbt.contracts.graph.compiled import", ") if equal: raise_compiler_error( 'Found two materializations with the name", "priority: - locally defined macros come first - then imported", "if key not in storage: return None pkg_dct: Mapping[PackageName, UniqueID]", "the user', }, ) send_anonymous_usage_stats: Optional[bool] = field( default=None, metadata=dict(description=(", "project name is used to determine priority: - locally defined", "packages), macro=macro, ) if filter is None or filter(candidate): candidates.append(candidate)", "WritableManifest(ArtifactMixin): nodes: Mapping[UniqueID, ManifestNode] = field( metadata=dict(description=( 'The nodes defined", "default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} )", "or None for any package. The root project name is", "def add_macro_patch( self, source_file: SchemaSourceFile, patch: ParsedMacroPatch, ) -> None:", "if source is not None and source.config.enabled: return source if", "doc source_file.docs.append(doc.unique_id) # end of methods formerly in ParseResult #", "Mapping[UniqueID, ParsedSourceDefinition] = field( metadata=dict(description=( 'The sources defined in the", "None, any package is allowed. 
nodetypes should be a container", "import DataClassMessagePackMixin from multiprocessing.synchronize import Lock from typing import (", "= field( default_factory=lambda: str(WritableManifest.dbt_schema_version) ) project_id: Optional[str] = field( default=None,", "def __init__(self, manifest: 'Manifest'): self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] =", "warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name, ) from dbt.helper_types import PathSet from", "of children of macros def build_macro_edges(nodes: List[Any]): forward_edges: Dict[str, List[str]]", "return Locality.Root elif macro.package_name in internal_packages: return Locality.Core else: return", "-> Optional[ParsedMacro]: \"\"\" The `generate_X_name` macros are similar to regular", "root_project_name: str, internal_packages: Set[str] ) -> Locality: if macro.package_name ==", "class MacroCandidate: locality: Locality macro: ParsedMacro def __eq__(self, other: object)", "searcher.search(self.disabled) if result is not None: assert isinstance(result, ParsedSourceDefinition) return", "result return None # Called by RunTask.defer_to_manifest def merge_from_artifact( self,", "the existing node is not compiled, update it with the", "result = searcher.search(self.disabled) return result def find_disabled_source_by_name( self, source_name: str,", "self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {} self.populate(manifest) def get_unique_id(self,", "at all and retains consistency def __init__(self, manifest: 'Manifest'): self.storage:", "self._materialization_candidates_for( project_name=project_name, materialization_name=materialization_name, adapter_type=atype, ) for atype in (adapter_type, None)", "attribute is used in context.common by each node, so we", ") disabled: Optional[List[CompileResultNode]] = field(metadata=dict( description='A list of the disabled", "found in manifest' ) return manifest.docs[unique_id] 
class SourceLookup(dbtClassMixin): def __init__(self,", "return self.nodes[unique_id] elif unique_id in self.sources: return self.sources[unique_id] elif unique_id", "is not None and source.config.enabled: return source if disabled is", "the line wrap eats newlines, so if you want newlines,", "'A unique identifier for the user', }, ) send_anonymous_usage_stats: Optional[bool]", "_deepcopy(v) for k, v in self.macros.items()}, docs={k: _deepcopy(v) for k,", "dbt.helper_types import PathSet from dbt.logger import GLOBAL_LOGGER as logger from", "resource_type_plural not in resource_fqns: resource_fqns[resource_type_plural] = set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns", "class CandidateList(List[M]): def last(self) -> Optional[ParsedMacro]: if not self: return", "Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar ) from", "def writable_manifest(self): self.build_parent_and_child_maps() return WritableManifest( nodes=self.nodes, sources=self.sources, macros=self.macros, docs=self.docs, exposures=self.exposures,", "by dbt.parser.manifest._resolve_refs_for_exposure # and dbt.parser.manifest._process_refs_for_node def resolve_ref( self, target_model_name: str,", "ParsedSourceDefinition): key = (source.source_name, source.name) if key not in self.storage:", "self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id) def add_disabled_nofile(self, node: CompileResultNode): if node.unique_id", "and during compilation. \"\"\" # These attributes are both positional", "used as # positional arguments to construct a Manifest. 
def", "resource_fqns: resource_fqns[resource_type_plural] = set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns # This is", "Iterable, Generic, cast, AbstractSet, ClassVar ) from typing_extensions import Protocol", "{ 'exposures': { k: v.to_dict(omit_none=False) for k, v in self.exposures.items()", "by RunTask.defer_to_manifest def merge_from_artifact( self, adapter, other: 'WritableManifest', selected: AbstractSet[UniqueID],", "TypeVar('K_T') V_T = TypeVar('V_T') def _expect_value( key: K_T, src: Mapping[K_T,", "static_analysis_path_count: int = 0 @dataclass class ManifestStateCheck(dbtClassMixin): vars_hash: FileHash =", "False my_checksum = self.files[key].checksum return my_checksum == source_file.checksum def add_source(", "node_package: str, target_package: Optional[str] = None, ) -> List[Optional[str]]: if", "List[CompileResultNode] = field(default_factory=list) files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) metadata: ManifestMetadata", "RefableLookup(dbtClassMixin): # model, seed, snapshot _lookup_types: ClassVar[set] = set(NodeType.refable()) #", "- if there is a `generate_{component}_name` macro in the root", "that. If the existing node is compiled, do not update", "be a NonSourceCompiledNode return cast(NonSourceCompiledNode, existing) _update_into(self.nodes, new_node) return new_node", "in cache but not found in manifest' ) return manifest.sources[unique_id]", "_lookup_types: ClassVar[set] = set(NodeType.Analysis) def _search_packages( current_project: str, node_package: str,", "@property def analysis_lookup(self) -> AnalysisLookup: if self._analysis_lookup is None: self._analysis_lookup", "UnpatchedSourceDefinition ): # sources can't be overwritten! _check_duplicates(source, self.sources) self.sources[source.unique_id]", "existing.original_file_path: raise dbt.exceptions.RuntimeException( f'cannot update a {new_item.resource_type} to have a", "# pickle and deepcopy use this. 
It returns a callable", "else: # something terrible has happened raise dbt.exceptions.InternalException( 'Expected node", "Dict[str, List[str]] PackageName = str DocName = str RefName =", "def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None): self.add_disabled_nofile(node) if isinstance(source_file,", "an empty source_patches because # tuple keys are not supported,", "update this manifest by replacing any unselected nodes with their", "return NotImplemented if self.locality < other.locality: return True if self.locality", "ParsedMacro, root_project_name: str, internal_packages: Set[str] ) -> Locality: if macro.package_name", "return CandidateList( MaterializationCandidate.from_macro(m, specificity) for m in self._find_macros_by_name(full_name, project_name) )", "str, current_project: str, node_package: str ) -> MaybeParsedSource: key =", "path. \"\"\" unique_id = new_item.unique_id if unique_id not in dest:", "def from_macro( cls, candidate: MacroCandidate, specificity: Specificity ) -> 'MaterializationCandidate':", "macro in self.macros.items(): if macro.name != name: continue candidate =", "component: str, root_project_name: str ) -> Optional[ParsedMacro]: \"\"\" The `generate_X_name`", "macro: ParsedMacro): if macro.unique_id in self.macros: # detect that the", "ParsedMacroPatch, ParsedSourceDefinition, ParsedExposure, HasUniqueID, UnpatchedSourceDefinition, ManifestNodes ) from dbt.contracts.graph.unparsed import", "macro.patch_path: package_name, existing_file_path = macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name] = unique_id", "v in self.files.items()}, state_check=_deepcopy(self.state_check), ) def build_parent_and_child_maps(self): edge_members = list(chain(", "str, target_model_package: Optional[str], current_project: str, node_package: str, ) -> MaybeNonSource:", "ParsedSourceDefinition] = field(default_factory=dict) macros: 
MutableMapping[str, ParsedMacro] = field(default_factory=dict) docs: MutableMapping[str,", "possible that the node is disabled if disabled is None:", "the \"Compilation Error\" indent # note that the line wrap", "disabled = self.find_disabled_source_by_name( target_source_name, target_table_name, pkg ) if disabled is", "lock! # pickle and deepcopy use this. It returns a", "return them as two separate dictionaries, each mapping unique IDs", "tracking import dbt.utils NodeEdgeMap = Dict[str, List[str]] PackageName = str", "of macros def build_macro_edges(nodes: List[Any]): forward_edges: Dict[str, List[str]] = {", "with new_itme. There must be an existing value to overwrite,", "name: str, package: Optional[str] = None ) -> Optional[ManifestNode]: searcher:", "dbt.graph.queue.get and ._include_in_cost def expect(self, unique_id: str) -> GraphMemberNode: if", "package name, or None for any package. The root project", "not tracking.active_user.do_not_track ) @classmethod def default(cls): return cls( dbt_schema_version=str(WritableManifest.dbt_schema_version), )", "if self.send_anonymous_usage_stats is None: self.send_anonymous_usage_stats = ( not tracking.active_user.do_not_track )", "# nodes can't be overwritten! 
_check_duplicates(node, self.nodes) self.nodes[node.unique_id] = node", "Core = 1 Imported = 2 Root = 3 class", "build_macro_child_map(self): edge_members = list(chain( self.nodes.values(), self.macros.values(), )) forward_edges = build_macro_edges(edge_members)", "because they are used as # positional arguments to construct", "None # Called by DocsRuntimeContext.doc def resolve_doc( self, name: str,", "return it - return the `generate_{component}_name` macro from the 'dbt'", "source_file: SourceFile, doc: ParsedDocumentation): _check_duplicates(doc, self.docs) self.docs[doc.unique_id] = doc source_file.docs.append(doc.unique_id)", "resource_fqns # This is called by 'parse_patch' in the NodePatchParser", "self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id)", "entry in the given iterable by name.\"\"\" for model in", "in selectors.yml' )) ) disabled: Optional[List[CompileResultNode]] = field(metadata=dict( description='A list", "= unique_id macro.patch(patch) def add_source_patch( self, source_file: SchemaSourceFile, patch: SourcePatch,", "ManifestNodes ) from dbt.contracts.graph.unparsed import SourcePatch from dbt.contracts.files import SourceFile,", "empty source_patches because # tuple keys are not supported, so", "lock. The only time we should want to lock is", "}, 'sources': { k: v.to_dict(omit_none=False) for k, v in self.sources.items()", "bool: return candidate.locality != Locality.Imported candidates: CandidateList = self._find_macros_by_name( name=f'generate_{component}_name',", "and avoid any concurrency issues around it. Make sure you", "args class MacroManifest(MacroMethods): def __init__(self, macros): self.macros = macros self.metadata", "None, 'deserialize': lambda x: None} ) def __pre_serialize__(self): # serialization", "cached file information: {}!' 
.format(key, name, old_file) ) return src[key]", "@dataclass class ParsingInfo: static_analysis_parsed_path_count: int = 0 static_analysis_path_count: int =", "to determine priority: - locally defined macros come first -", "if resource_type_plural not in resource_fqns: resource_fqns[resource_type_plural] = set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return", "= set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns # This is called by", "field( default=None, metadata={ 'description': 'A unique identifier for the user',", "1 Adapter = 2 @dataclass class MacroCandidate: locality: Locality macro:", "self.ref_lookup.get_unique_id(patch.name, None) elif patch.yaml_key == 'analyses': unique_id = self.analysis_lookup.get_unique_id(patch.name, None)", "bool: if not isinstance(other, MaterializationCandidate): return NotImplemented if self.specificity <", "selected: AbstractSet[UniqueID], ) -> None: \"\"\"Given the selected unique IDs", "it must all be added in the __reduce_ex__ method in", "its dependencies' )) ) exposures: Mapping[UniqueID, ParsedExposure] = field( metadata=dict(description=(", "will usually happen when a node is disabled return #", "dbt project and its dependencies' )) ) macros: Mapping[UniqueID, ParsedMacro]", "macros come first - then imported macros - then macros", "return self.perform_lookup(unique_id, manifest) return None def add_doc(self, doc: ParsedDocumentation): if", "None: return result return None # Called by RunTask.defer_to_manifest def", "= new_item # This contains macro methods that are in", "for n in nodes} for node in nodes: backward_edges[node.unique_id] =", "_search_packages( current_project, node_package, target_model_package ) for pkg in candidates: node", "self, unique_id: UniqueID, manifest ) -> ManifestNode: if unique_id not", "{} (packages {} and ' '{}). 
dbt cannot resolve this", "value to overwrite, and they two nodes must have the", "if getattr(existing, 'compiled', False): # already compiled -> must be", "in the root project, return it - return the `generate_{component}_name`", "disabled: Optional[ParsedSourceDefinition] = None for pkg in candidates: source =", "specificity=specificity, ) def __eq__(self, other: object) -> bool: if not", "source: ParsedSourceDefinition): key = (source.source_name, source.name) if key not in", "in self._lookup_types: if node.name not in self.storage: self.storage[node.name] = {}", "def ref_lookup(self) -> RefableLookup: if self._ref_lookup is None: self._ref_lookup =", "return None def add_source(self, source: ParsedSourceDefinition): key = (source.source_name, source.name)", "def __pre_serialize__(self): # serialization won't work with anything except an", "'nodes': { k: v.to_dict(omit_none=False) for k, v in self.nodes.items() },", "if node.patch_path: package_name, existing_file_path = node.patch_path.split('://') raise_duplicate_patch_name(patch, existing_file_path) source_file.append_patch(patch.yaml_key, unique_id)", "avoid any concurrency issues around it. 
Make sure you don't", "_sort_values(forward_edges), _sort_values(backward_edges) # Build a map of children of macros", "= field(metadata=dict( description='A mapping from child nodes to their dependencies',", "# Called in dbt.compilation.Linker.write_graph and # dbt.graph.queue.get and ._include_in_cost def", "new_item.original_file_path != existing.original_file_path: raise dbt.exceptions.RuntimeException( f'cannot update a {new_item.resource_type} to", "= ManifestMetadata() # This is returned by the 'graph' context", "ClassVar ) from typing_extensions import Protocol from uuid import UUID", "to overwrite, and they two nodes must have the same", "= str UniqueID = str def find_unique_id_for_package(storage, key, package: Optional[PackageName]):", "k, v in self.docs.items()}, exposures={k: _deepcopy(v) for k, v in", "node.is_ephemeral and unique_id not in selected and not adapter.get_relation( current.database,", "Disabled[ManifestNode] ]] T = TypeVar('T', bound=GraphMemberNode) def _update_into(dest: MutableMapping[str, T],", "resolve this ambiguity' .format(self.macro.name, self.macro.package_name, other.macro.package_name) ) return equal def", "Optional[str] ) -> Optional[ParsedMacro]: \"\"\"Find a macro in the graph", "better way. 
def __init__(self): self.macros = [] self.metadata = {}", "field( default_factory=ParsingInfo, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}", "def perform_lookup( self, unique_id: UniqueID, manifest: 'Manifest' ) -> ParsedSourceDefinition:", "call with an ' f'unrecognized {new_item.resource_type}: {new_item.unique_id}' ) existing =", "AnySourceFile from dbt.contracts.util import ( BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version )", "return _sort_values(forward_edges) def _deepcopy(value): return value.from_dict(value.to_dict(omit_none=True)) class Locality(enum.IntEnum): Core =", "D MaybeDocumentation = Optional[ParsedDocumentation] MaybeParsedSource = Optional[Union[ ParsedSourceDefinition, Disabled[ParsedSourceDefinition], ]]", "candidates.last() def _find_macros_by_name( self, name: str, root_project_name: str, filter: Optional[Callable[[MacroCandidate],", "current.schema, current.identifier ) ): merged.add(unique_id) self.nodes[unique_id] = node.replace(deferred=True) # log", "# for the object, i.e. the Manifest. # The order", "that the node is disabled if disabled is None: disabled", "writable_manifest(self): self.build_parent_and_child_maps() return WritableManifest( nodes=self.nodes, sources=self.sources, macros=self.macros, docs=self.docs, exposures=self.exposures, selectors=self.selectors,", "package, NodeType.refable() ) result = searcher.search(self.disabled) return result def find_disabled_source_by_name(", "cache but not found in manifest' ) return manifest.nodes[unique_id] class", ".format(self.macro.name, self.macro.package_name, other.macro.package_name) ) return equal def __lt__(self, other: object)", "model.resource_type not in self.nodetypes: return False if self.name != model.search_name:", "ParseResult object source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict) # following is", "defined in the dbt project and its dependencies' )) )", "position. 
nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict) sources: MutableMapping[str, ParsedSourceDefinition] =", "-> Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {} all_resources", "['models', 'seeds', 'snapshots']: unique_id = self.ref_lookup.get_unique_id(patch.name, None) elif patch.yaml_key ==", ")) ) sources: Mapping[UniqueID, ParsedSourceDefinition] = field( metadata=dict(description=( 'The sources", "parent_map=self.parent_map, ) def write(self, path): self.writable_manifest().write(path) # Called in dbt.compilation.Linker.write_graph", "as logger from dbt.node_types import NodeType from dbt.ui import line_wrap_message", "m in self._find_macros_by_name(full_name, project_name) ) def find_materialization_macro_by_name( self, project_name: str,", "Called by dbt.parser.manifest._resolve_refs_for_exposure # and dbt.parser.manifest._process_refs_for_node def resolve_ref( self, target_model_name:", "import Protocol from uuid import UUID from dbt.contracts.graph.compiled import (", "def search(self, haystack: Iterable[N]) -> Optional[N]: \"\"\"Find an entry in", "self, name: str, package: Optional[str] = None ) -> Optional[ManifestNode]:", "None ) -> CandidateList: \"\"\"Find macros by their name. \"\"\"", "ParsedSourceDefinition, ParsedExposure, HasUniqueID, UnpatchedSourceDefinition, ManifestNodes ) from dbt.contracts.graph.unparsed import SourcePatch", "str ) -> MaybeParsedSource: key = (target_source_name, target_table_name) candidates =", "{new_item.resource_type}: {new_item.unique_id}' ) existing = dest[unique_id] if new_item.original_file_path != existing.original_file_path:", "frozenset( x.database for x in chain(self.nodes.values(), self.sources.values()) ) # This", "just need to avoid the lock! 
# pickle and deepcopy", "from the 'dbt' internal project \"\"\" def filter(candidate: MacroCandidate) ->", "raise dbt.exceptions.InternalException( f'Unexpected yaml_key {patch.yaml_key} for patch in ' f'file", "of methods formerly in ParseResult # Provide support for copy.deepcopy()", "this. It returns a callable object used to # create", "MutableMapping[str, FileHash] = field(default_factory=dict) @dataclass class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): \"\"\"The", "cls( locality=candidate.locality, macro=candidate.macro, specificity=specificity, ) def __eq__(self, other: object) ->", "fix this error, rename or remove one of the following", "search(self, haystack: Iterable[N]) -> Optional[N]: \"\"\"Find an entry in the", "Any] = field( metadata=dict(description=( 'The selectors defined in selectors.yml' ))", "@dataclass class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): \"\"\"The manifest for the full", "come first - then imported macros - then macros defined", "root project, return it - return the `generate_{component}_name` macro from", "build_node_edges(nodes: List[ManifestNode]): \"\"\"Build the forward and backward edges on the", "default_factory=ParsingInfo, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None} )", "SourceLookup: if self._source_lookup is None: self._source_lookup = SourceLookup(self) return self._source_lookup", "ParsedDocumentation: if unique_id not in manifest.docs: raise dbt.exceptions.InternalException( f'Doc {unique_id}", "in the dbt project and its dependencies' )) ) exposures:", "NonSourceCompiledNode: \"\"\"update the node with a lock. The only time", "for any package. The root project name is used to", "it - return the `generate_{component}_name` macro from the 'dbt' internal", "the full graph, after parsing and during compilation. 
\"\"\" #", "= DocLookup(self) @property def source_lookup(self) -> SourceLookup: if self._source_lookup is", "get_used_databases(self): return frozenset( x.database for x in chain(self.nodes.values(), self.sources.values()) )", "self._disabled: self._disabled[node.unique_id].append(node) else: self._disabled[node.unique_id] = [node] def add_disabled(self, source_file: AnySourceFile,", "-> CandidateList: if adapter_type is None: specificity = Specificity.Default else:", "{len(merged)} items from state (sample: {sample})' ) # Methods that", "self.get_unique_id(key, package) if unique_id is not None: return self.perform_lookup(unique_id, manifest)", "description='Metadata about the manifest', )) def _check_duplicates( value: HasUniqueID, src:", "Manifest # and the MacroManifest class MacroMethods: # Just to", "set() resource_fqns[resource_type_plural].add(tuple(resource.fqn)) return resource_fqns # This is called by 'parse_patch'", "the forward edge dict for simplicity forward_edges: Dict[str, List[str]] =", "package: Optional[str], current_project: str, node_package: str, ) -> Optional[ParsedDocumentation]: \"\"\"Resolve", "unique IDs to lists of edges. 
\"\"\" backward_edges: Dict[str, List[str]]", "= field(default_factory=dict) disabled: List[CompileResultNode] = field(default_factory=list) files: MutableMapping[str, AnySourceFile] =", "elif current_project == node_package: return [current_project, None] else: return [current_project,", "MutableMapping[str, ParsedExposure] = field(default_factory=dict) selectors: MutableMapping[str, Any] = field(default_factory=dict) disabled:", "find_unique_id_for_package(self.storage, key, package) def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):", "in manifest' ) return manifest.sources[unique_id] class RefableLookup(dbtClassMixin): # model, seed,", "Specificity.Default else: specificity = Specificity.Adapter full_name = dbt.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type,", "= list(chain( self.nodes.values(), self.macros.values(), )) forward_edges = build_macro_edges(edge_members) return forward_edges", "MacroCandidate: locality: Locality macro: ParsedMacro def __eq__(self, other: object) ->", "same original file path. \"\"\" unique_id = new_item.unique_id if unique_id", "UUID from dbt.contracts.graph.compiled import ( CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode )", "key = (patch.overrides, patch.name) if key in self.source_patches: raise_duplicate_source_patch_name(patch, self.source_patches[key])", "elif unique_id in self.sources: return self.sources[unique_id] elif unique_id in self.exposures:", "not in storage: return None pkg_dct: Mapping[PackageName, UniqueID] = storage[key]", "Manifest. # The order of the arguments must match the", "pkg, self) if source is not None and source.config.enabled: return", "# args tuple in the right position. 
nodes: MutableMapping[str, ManifestNode]", "in refables and not node.is_ephemeral and unique_id not in selected", "self.perform_lookup(unique_id, manifest) return None def add_source(self, source: ParsedSourceDefinition): key =", ") return manifest.nodes[unique_id] class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] = set(NodeType.Analysis) def", "you don't call this until you're done with building your", "nodetypes: List[NodeType] def _matches(self, model: N) -> bool: \"\"\"Return True", "MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict) _doc_lookup: Optional[DocLookup] = field( default=None, metadata={'serialize':", "source_file.nodes.append(node.unique_id) def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure): _check_duplicates(exposure, self.exposures) self.exposures[exposure.unique_id]", ") -> None: \"\"\"Given the selected unique IDs and a", "counterpart. Only non-ephemeral refable nodes are examined. \"\"\" refables =", "default=None, metadata=dict(description='The type name of the adapter'), ) def __post_init__(self):", "== candidate.macro.package_name candidates: CandidateList = self._find_macros_by_name( name=name, root_project_name=root_project_name, filter=filter, )", "nodes={k: _deepcopy(v) for k, v in self.nodes.items()}, sources={k: _deepcopy(v) for", "the Manifest # and the MacroManifest class MacroMethods: # Just", "return frozenset( x.database for x in chain(self.nodes.values(), self.sources.values()) ) #", "lists of edges. \"\"\" backward_edges: Dict[str, List[str]] = {} #", "return if macro.patch_path: package_name, existing_file_path = macro.patch_path.split('://') raise_duplicate_macro_patch_name(patch, existing_file_path) source_file.macro_patches[patch.name]", "as # positional arguments to construct a Manifest. def __reduce_ex__(self,", "project \"{macro.package_name}\". 
To fix this error, rename or remove one", "metadata=dict(description=( 'The docs defined in the dbt project and its", "file ' f'path!' ) dest[unique_id] = new_item # This contains", "not None: assert isinstance(result, ParsedSourceDefinition) return result def _materialization_candidates_for( self,", "import ( CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name, )", "False @dataclass class MaterializationCandidate(MacroCandidate): specificity: Specificity @classmethod def from_macro( cls,", "disabled is None: disabled = self.find_disabled_by_name( target_model_name, pkg ) if", "return self._source_lookup def rebuild_source_lookup(self): self._source_lookup = SourceLookup(self) @property def ref_lookup(self)", "object) -> bool: if not isinstance(other, MaterializationCandidate): return NotImplemented equal", "This will usually happen when a node is disabled return", "but not found in manifest' ) return manifest.docs[unique_id] class SourceLookup(dbtClassMixin):", "import SourceFile, SchemaSourceFile, FileHash, AnySourceFile from dbt.contracts.util import ( BaseArtifactMetadata,", "This follows the same algorithm as resolve_ref except the is_enabled", "isinstance(source_file, SchemaSourceFile): assert test_from source_file.add_test(node.unique_id, test_from) else: source_file.nodes.append(node.unique_id) def add_doc(self,", "def merge_from_artifact( self, adapter, other: 'WritableManifest', selected: AbstractSet[UniqueID], ) ->", "anything except an empty source_patches because # tuple keys are", ") -> ParsedDocumentation: if unique_id not in manifest.docs: raise dbt.exceptions.InternalException(", "Locality: if macro.package_name == root_project_name: return Locality.Root elif macro.package_name in", "import chain, islice from mashumaro import DataClassMessagePackMixin from multiprocessing.synchronize import", "self._disabled, 
self._doc_lookup, self._source_lookup, self._ref_lookup, ) return self.__class__, args class MacroManifest(MacroMethods):", ")) ) macros: Mapping[UniqueID, ParsedMacro] = field( metadata=dict(description=( 'The macros", "self._source_lookup is None: self._source_lookup = SourceLookup(self) return self._source_lookup def rebuild_source_lookup(self):", "filter: Optional[Callable[[MacroCandidate], bool]] = None ) -> CandidateList: \"\"\"Find macros", "with anything except an empty source_patches because # tuple keys", "it's empty self.source_patches = {} return self @classmethod def __post_deserialize__(cls,", "dbt.exceptions import ( CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name,", "RefName = str UniqueID = str def find_unique_id_for_package(storage, key, package:", "self.storage: self.storage[doc.name] = {} self.storage[doc.name][doc.package_name] = doc.unique_id def populate(self, manifest):", "adapter_type: Optional[str], ) -> CandidateList: if adapter_type is None: specificity", "return False if key not in self.files: return False my_checksum" ]
[ "schema schema_016; ALTER DEFAULT PRIVILEGES for role default016_01 in schema", "CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE", "cascade; drop function if exists test_default_016(int) cascade; drop schema if", "revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT", "CASCADE CONSTRAINTS; ''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql -d", "role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01 CASCADE CONSTRAINTS;", "end; select test_default_016(16); drop type if exists type016; CREATE TYPE", "= self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------') sql_cmd", "from yat.test import macro from yat.test import Node sys.path.append(sys.path[0] +", "cascade; create schema schema_016; ALTER DEFAULT PRIVILEGES for role {self.username}", "drop schema if exists schema_016 cascade; create schema schema_016; ALTER", "ALL on functions from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT", "create or replace function test_default_016(a int) return int as b", "\"/../\") from testcase.utils.Logger import Logger from testcase.utils.Constant import Constant from", "default016_01 -W {macro.COMMON_PASSWD} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result()", "TYPES from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for", "select * from test_alter_default_016; ALTER DEFAULT PRIVILEGES for role {self.username}", "testcase.utils.Logger import Logger from testcase.utils.Constant import Constant from testcase.utils.CommonSH import", "drop user if exists default016_01; create user default016_01 password '{<PASSWORD>}';", ": 功能测试 Case Name : 
初始用户和sysadmin自己alter自己权限 Description : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限", "for role default016_01 GRANT EXECUTE on functions to default016_01 WITH", "default016_01; ''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd = (f'''", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR", "Node sys.path.append(sys.path[0] + \"/../\") from testcase.utils.Logger import Logger from testcase.utils.Constant", "schema_016 revoke ALL on tables from {self.username} CASCADE CONSTRAINTS ;", "owned by default016_01 cascade; drop user if exists default016_01; ''')", "for role default016_01 GRANT USAGE on TYPES to default016_01 WITH", "under Mulan PSL v2. You can use this software according", "unique); select * from test_alter_default_016; drop function if exists test_default_016(int)", "ALL PRIVILEGES on functions to {self.username} WITH GRANT OPTION ;", "'{<PASSWORD>}'; grant all privileges to default016_01; ''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd)", "test_default_016(16); drop type if exists type016; CREATE TYPE type016 AS", "if exists type016 cascade; drop function if exists test_default_016(int) cascade;", "PRIVILEGES on TYPES to default016_01 WITH GRANT OPTION ; ALTER", "def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode = Node('dbuser') self.DB_ENV_PATH = macro.DB_ENV_PATH self.Constant", "on functions from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for", "DEFAULT PRIVILEGES for role {self.username} GRANT USAGE on TYPES to", "for role {self.username} in schema schema_016 revoke ALL on tables", "for role default016_01 revoke EXECUTE on functions from default016_01 CASCADE", "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY", "2.3.清理 期望:清理成功 
备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变", ": \"\"\" import sys import unittest from yat.test import macro", "from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role", "sql_cmd = (''' drop table if exists test_alter_default_016 cascade; drop", "on TYPES from default016_01 CASCADE CONSTRAINTS; ''') excute_cmd = f'''", "role {self.username} GRANT EXECUTE on functions to {self.username} WITH GRANT", "yat.test import Node sys.path.append(sys.path[0] + \"/../\") from testcase.utils.Logger import Logger", "role {self.username} in schema schema_016 revoke ALL on functions from", "GRANT ALL PRIVILEGES on TYPES to default016_01 WITH GRANT OPTION", "revoke EXECUTE on functions from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT", "drop function if exists test_default_016(int) cascade; create or replace function", "self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------') sql_cmd = (''' drop table if exists", "revoke USAGE on TYPES from default016_01 CASCADE CONSTRAINTS; ''') excute_cmd", "schema schema_016 GRANT ALL PRIVILEGES on tables to default016_01 WITH", "schema_016; ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016", "EXECUTE on functions from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES", "KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "default016_01 GRANT EXECUTE on functions to default016_01 WITH GRANT OPTION", "logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------2.3.清理--------') sql_cmd = commonsh.execut_db_sql(f''' drop", "PRIVILEGES for role default016_01 GRANT USAGE on TYPES to default016_01", "to {self.username} WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES for", "{self.username} GRANT EXECUTE on functions to {self.username} WITH 
GRANT OPTION", "schema_016 GRANT ALL PRIVILEGES on TYPES to default016_01 WITH GRANT", "table if exists test_alter_default_016 cascade; create table test_alter_default_016(id int unique);", "int) return int as b int:= a; begin for i", "GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role default016_01 GRANT", "int unique); select * from test_alter_default_016; drop function if exists", "self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------2.3.清理--------') sql_cmd = commonsh.execut_db_sql(f'''", "type016 AS (c_int int,c_text text); drop table if exists test_alter_default_016", "PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY", "WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role {self.username}", "import Node sys.path.append(sys.path[0] + \"/../\") from testcase.utils.Logger import Logger from", "OPTION ; ALTER DEFAULT PRIVILEGES for role default016_01 in schema", "= CommonSH('dbuser') class Privategrant(unittest.TestCase): def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode = Node('dbuser')", "revoke EXECUTE on functions from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT", "in schema schema_016 revoke ALL on TYPES from default016_01 CASCADE", "; ALTER DEFAULT PRIVILEGES for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on", "schema schema_016 revoke ALL on functions from default016_01 CASCADE CONSTRAINTS", "执行alter测试--------') sql_cmd = (f''' drop schema if exists schema_016 cascade;", "''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd = commonsh.execut_db_sql('''", "type if exists type016; CREATE TYPE type016 AS (c_int int,c_text", "PRIVILEGES for role default016_01 in 
schema schema_016 revoke ALL on", "SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES", "grant all privileges to default016_01; ''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接", "default016_01 in schema schema_016 revoke ALL on TYPES from default016_01", "testcase.utils.CommonSH import CommonSH logger = Logger() commonsh = CommonSH('dbuser') class", "msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------') sql_cmd", "to default016_01; ''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd =", "PRIVILEGES for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username}", "ALTER DEFAULT PRIVILEGES for role {self.username} revoke USAGE on TYPES", "revoke ALL on tables from {self.username} CASCADE CONSTRAINTS ; ALTER", "drop type if exists type016 cascade; drop function if exists", "default016_01 in schema schema_016 GRANT ALL PRIVILEGES on functions to", "DEFAULT PRIVILEGES for role default016_01 revoke USAGE on TYPES from", "macro.DB_ENV_PATH self.Constant = Constant() # 初始用户用户名 self.username = self.userNode.ssh_user #", "schema schema_016 GRANT ALL PRIVILEGES on tables to {self.username} WITH", "from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role {self.username}", "for role {self.username} GRANT EXECUTE on functions to {self.username} WITH", "on tables to {self.username} WITH GRANT OPTION ; ALTER DEFAULT", "int:= a; begin for i in 1..a loop b:=b+1; end", "USAGE on TYPES to default016_01 WITH GRANT OPTION ; drop", "IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR", "default016_01 GRANT USAGE on TYPES to default016_01 WITH GRANT 
OPTION", "on tables from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for", "replace function test_default_016(a int) return int as b int:= a;", "def tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd = commonsh.execut_db_sql(''' drop owned by default016_01", "{self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password} -c \"{sql_cmd}\" '''", "role default016_01 revoke USAGE on TYPES from default016_01 CASCADE CONSTRAINTS;", "-p {self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD} -c \"{sql_cmd}\" ''' logger.info(excute_cmd)", "logger.info('--------1.2.清理环境--------') sql_cmd = (''' drop table if exists test_alter_default_016 cascade;", "EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,", "ALL on tables from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT", "functions from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role", "Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2. 
You", "self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd = (f''' drop schema if", "create schema schema_016; ALTER DEFAULT PRIVILEGES for role default016_01 in", "初始用户用户名 self.username = self.userNode.ssh_user # 初始用户密码 self.password = <PASSWORD> def", "; ALTER DEFAULT PRIVILEGES for role {self.username} GRANT USAGE on", "{self.username} in schema schema_016 revoke ALL on TYPES from {self.username}", "sys import unittest from yat.test import macro from yat.test import", "on TYPES from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES", "according to the terms and conditions of the Mulan PSL", "gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD} -c", "logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd = (f''' drop schema if exists schema_016", "schema_016; ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016", "FIT FOR A PARTICULAR PURPOSE. 
See the Mulan PSL v2", "role default016_01 in schema schema_016 revoke ALL on functions from", "Case Type : 功能测试 Case Name : 初始用户和sysadmin自己alter自己权限 Description :", "test_default_016(int) cascade; create or replace function test_default_016(a int) return int", "role {self.username} revoke EXECUTE on functions from {self.username} CASCADE CONSTRAINTS;", "{self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role {self.username} in", "DEFAULT PRIVILEGES for role {self.username} revoke USAGE on TYPES from", "TYPES to {self.username} WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES", "self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd = commonsh.execut_db_sql(''' drop owned", "-c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg)", "schema if exists schema_016 cascade; create schema schema_016; drop table", "DEFAULT PRIVILEGES for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to", "TYPE type016 AS (c_int int,c_text text); drop table if exists", "TYPES from default016_01 CASCADE CONSTRAINTS; ''') excute_cmd = f''' source", "PRIVILEGES for role default016_01 revoke USAGE on TYPES from default016_01", "logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------') sql_cmd = commonsh.execut_db_sql(f''' drop owned by default016_01", "PRIVILEGES on tables to {self.username} WITH GRANT OPTION ; ALTER", "role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on functions", "+ \"/../\") from testcase.utils.Logger import Logger from testcase.utils.Constant import Constant", "loop; return b; end; select test_default_016(16); drop type if exists", "{self.username} GRANT USAGE on TYPES to {self.username} WITH GRANT OPTION", "CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role default016_01 in 
schema schema_016", "if exists default016_01; create user default016_01 password '{<PASSWORD>}'; grant all", "on tables from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES", "drop schema if exists schema_016 cascade; ''') excute_cmd = f'''", "PRIVILEGES for role {self.username} in schema schema_016 revoke ALL on", "exists type016 cascade; drop function if exists test_default_016(int) cascade; drop", "v2. You can use this software according to the terms", "= Constant() # 初始用户用户名 self.username = self.userNode.ssh_user # 初始用户密码 self.password", "type016 cascade; drop function if exists test_default_016(int) cascade; drop schema", "b:=b+1; end loop; return b; end; select test_default_016(16); drop type", "USAGE on TYPES from default016_01 CASCADE CONSTRAINTS; ''') excute_cmd =", "licensed under Mulan PSL v2. You can use this software", "ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke", "test_alter_default_016 cascade; drop type if exists type016 cascade; drop function", "TYPES to default016_01 WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES", "default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role default016_01", "{macro.COMMON_PASSWD} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1],", "on functions from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for", "software according to the terms and conditions of the Mulan", "def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd = (f''' drop schema if", ": 初始用户和sysadmin自己alter自己权限 Description : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户", "role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on tables", "loop b:=b+1; end loop; return b; end; select 
test_default_016(16); drop", "exists test_default_016(int) cascade; drop schema if exists schema_016 cascade; ''')", "OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT", "schema schema_016 GRANT ALL PRIVILEGES on TYPES to default016_01 WITH", "-d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password} -c \"{sql_cmd}\"", "test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd = (f''' drop schema if exists", "schema_016 revoke ALL on tables from default016_01 CASCADE CONSTRAINTS ;", "schema if exists schema_016 cascade; ''') excute_cmd = f''' source", "for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on", "CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role default016_01 in schema", "CONSTRAINTS; ''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name}", "default016_01 in schema schema_016 GRANT ALL PRIVILEGES on TYPES to", "ALL PRIVILEGES on TYPES to {self.username} WITH GRANT OPTION ;", "schema_016 revoke ALL on TYPES from {self.username} CASCADE CONSTRAINTS ;", "drop function if exists test_default_016(int) cascade; drop schema if exists", "int,c_text text); drop table if exists test_alter_default_016 cascade; create table", "-p {self.userNode.db_port} -U {self.username} -W {self.password} -c \"{sql_cmd}\" ''' logger.info(excute_cmd)", "exists test_alter_default_016 cascade; create table test_alter_default_016(id int unique); select *", "logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode = Node('dbuser') self.DB_ENV_PATH = macro.DB_ENV_PATH self.Constant = Constant()", "GRANT ALL PRIVILEGES on TYPES to {self.username} WITH GRANT OPTION", "OPTION ; ALTER DEFAULT PRIVILEGES for role {self.username} GRANT EXECUTE", "to {self.username} WITH GRANT OPTION ; drop schema if exists", "default016_01 revoke EXECUTE on functions from default016_01 CASCADE 
CONSTRAINTS; ALTER", "ALL on TYPES from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT", "ALTER DEFAULT PRIVILEGES for role default016_01 revoke USAGE on TYPES", "self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------') sql_cmd = commonsh.execut_db_sql(f''' drop", "function if exists test_default_016(int) cascade; drop schema if exists schema_016", "Privategrant(unittest.TestCase): def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode = Node('dbuser') self.DB_ENV_PATH = macro.DB_ENV_PATH", "{self.username} revoke USAGE on TYPES from {self.username} CASCADE CONSTRAINTS; ''')", "ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND,", "EXECUTE on functions to default016_01 WITH GRANT OPTION ; ALTER", "CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role default016_01 revoke", "备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History : \"\"\" import sys import unittest from yat.test", "logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------') sql_cmd = (''' drop", "schema schema_016 revoke ALL on TYPES from {self.username} CASCADE CONSTRAINTS", "{self.password} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1],", "function if exists test_default_016(int) cascade; create or replace function test_default_016(a", "revoke ALL on TYPES from {self.username} CASCADE CONSTRAINTS ; ALTER", "logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------') sql_cmd = commonsh.execut_db_sql(f'''", "A PARTICULAR PURPOSE. 
See the Mulan PSL v2 for more", "ALL on functions from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT", "for role default016_01 in schema schema_016 revoke ALL on functions", "import sys import unittest from yat.test import macro from yat.test", "schema schema_016 revoke ALL on TYPES from default016_01 CASCADE CONSTRAINTS", "a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE", "Mulan PSL v2 for more details. \"\"\" \"\"\" Case Type", "PSL v2. You may obtain a copy of Mulan PSL", "the Mulan PSL v2 for more details. \"\"\" \"\"\" Case", "DEFAULT PRIVILEGES for role default016_01 GRANT USAGE on TYPES to", "GRANT ALL PRIVILEGES on functions to {self.username} WITH GRANT OPTION", "schema_016 revoke ALL on functions from {self.username} CASCADE CONSTRAINTS ;", "GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01 WITH GRANT OPTION ;", "for role default016_01 in schema schema_016 revoke ALL on TYPES", "{self.username} CASCADE CONSTRAINTS; ''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql", "PRIVILEGES on tables to default016_01 WITH GRANT OPTION ; ALTER", "TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 
See", "* from test_alter_default_016; ALTER DEFAULT PRIVILEGES for role {self.username} in", "(''' drop table if exists test_alter_default_016 cascade; drop type if", "select test_default_016(16); drop type if exists type016; CREATE TYPE type016", "default016_01 cascade; drop user if exists default016_01; ''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1],", "import Logger from testcase.utils.Constant import Constant from testcase.utils.CommonSH import CommonSH", "Logger() commonsh = CommonSH('dbuser') class Privategrant(unittest.TestCase): def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode", "type016); select * from test_alter_default_016; ALTER DEFAULT PRIVILEGES for role", "Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功", "drop user if exists default016_01; ''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def", "for role default016_01 in schema schema_016 revoke ALL on tables", "You may obtain a copy of Mulan PSL v2 at:", "schema_016 cascade; create schema schema_016; ALTER DEFAULT PRIVILEGES for role", "in schema schema_016 revoke ALL on TYPES from {self.username} CASCADE", "# 初始用户密码 self.password = <PASSWORD> def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd", "(c_int int,c_text text); drop table if exists test_alter_default_016 cascade; create", "TYPES from {self.username} CASCADE CONSTRAINTS; ''') excute_cmd = f''' source", "from yat.test import Node sys.path.append(sys.path[0] + \"/../\") from testcase.utils.Logger import", "yat.test import macro from yat.test import Node sys.path.append(sys.path[0] + \"/../\")", "GRANT EXECUTE on functions to {self.username} WITH GRANT OPTION ;", "test_alter_default_016 cascade; 
create table test_alter_default_016(id type016); select * from test_alter_default_016;", "PURPOSE. See the Mulan PSL v2 for more details. \"\"\"", "Case Name : 初始用户和sysadmin自己alter自己权限 Description : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功", "''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg)", "gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password} -c", "1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理", "test_alter_default_016 cascade; create table test_alter_default_016(id int unique); select * from", "执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History : \"\"\" import sys import", "create table test_alter_default_016(id type016); select * from test_alter_default_016; ALTER DEFAULT", "ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "on TYPES to default016_01 WITH GRANT OPTION ; ALTER DEFAULT", "schema schema_016 revoke ALL on functions from {self.username} CASCADE CONSTRAINTS", "on TYPES to {self.username} WITH GRANT OPTION ; ALTER DEFAULT", "; ALTER DEFAULT PRIVILEGES for role default016_01 revoke USAGE on", "type016; CREATE TYPE type016 AS (c_int int,c_text text); drop table", "revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT", "terms and conditions of the Mulan PSL v2. You may", "msg) logger.info('--------1.2.清理环境--------') sql_cmd = (''' drop table if exists test_alter_default_016", "OR FIT FOR A PARTICULAR PURPOSE. 
See the Mulan PSL", "; ALTER DEFAULT PRIVILEGES for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on", "msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------2.3.清理--------') sql_cmd", "exists type016; CREATE TYPE type016 AS (c_int int,c_text text); drop", "on tables from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for", "GRANT ALL PRIVILEGES on tables to {self.username} WITH GRANT OPTION", "AS (c_int int,c_text text); drop table if exists test_alter_default_016 cascade;", "default016_01 WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role", "期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户", "IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF", "in schema schema_016 revoke ALL on functions from {self.username} CASCADE", "default016_01; create user default016_01 password '{<PASSWORD>}'; grant all privileges to", "create schema schema_016; ALTER DEFAULT PRIVILEGES for role {self.username} in", "{self.username} in schema schema_016 GRANT ALL PRIVILEGES on tables to", "PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES", "USAGE on TYPES to {self.username} WITH GRANT OPTION ; drop", "if exists test_alter_default_016 cascade; create table test_alter_default_016(id int unique); select", "= self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------') sql_cmd =", "BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A", "logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------') sql_cmd = 
commonsh.execut_db_sql(f''' drop owned by", "testcase.utils.Constant import Constant from testcase.utils.CommonSH import CommonSH logger = Logger()", "b int:= a; begin for i in 1..a loop b:=b+1;", "{self.username} WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role", "tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd = commonsh.execut_db_sql(''' drop owned by default016_01 cascade;", "in 1..a loop b:=b+1; end loop; return b; end; select", "self.Constant = Constant() # 初始用户用户名 self.username = self.userNode.ssh_user # 初始用户密码", "for i in 1..a loop b:=b+1; end loop; return b;", "schema_016 cascade; create schema schema_016; drop table if exists test_alter_default_016", "2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect :", "{self.userNode.db_name} -p {self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD} -c \"{sql_cmd}\" '''", "MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the Mulan", "schema_016 GRANT ALL PRIVILEGES on TYPES to {self.username} WITH GRANT", "; ALTER DEFAULT PRIVILEGES for role default016_01 GRANT EXECUTE on", "{self.username} in schema schema_016 revoke ALL on functions from {self.username}", "AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER", "THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT", "DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL", "; ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016", "from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role default016_01", "Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2.", "= f''' source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U", "PSL v2 for more details. 
\"\"\" \"\"\" Case Type :", "--------') sql_cmd = commonsh.execut_db_sql(f''' drop owned by default016_01 cascade; drop", "Node('dbuser') self.DB_ENV_PATH = macro.DB_ENV_PATH self.Constant = Constant() # 初始用户用户名 self.username", "schema schema_016 GRANT ALL PRIVILEGES on functions to {self.username} WITH", "DEFAULT PRIVILEGES for role default016_01 revoke EXECUTE on functions from", "cascade; drop user if exists default016_01; ''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd)", "CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE", "for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on", "ALTER DEFAULT PRIVILEGES for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables", "DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL", "EXECUTE on functions from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES", "owned by default016_01 cascade; drop user if exists default016_01; create", "begin for i in 1..a loop b:=b+1; end loop; return", "default016_01 password '{<PASSWORD>}'; grant all privileges to default016_01; ''') logger.info(sql_cmd)", "''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p", "* from test_alter_default_016; drop function if exists test_default_016(int) cascade; create", "functions from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for", "unittest from yat.test import macro from yat.test import Node sys.path.append(sys.path[0]", "DEFAULT PRIVILEGES for role default016_01 GRANT EXECUTE on functions to", "msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------') sql_cmd = commonsh.execut_db_sql(f''' drop owned", "setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode = Node('dbuser') 
self.DB_ENV_PATH = macro.DB_ENV_PATH self.Constant =", "''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户", "cascade; create or replace function test_default_016(a int) return int as", "self.DB_ENV_PATH = macro.DB_ENV_PATH self.Constant = Constant() # 初始用户用户名 self.username =", "-W {self.password} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg)", "= Logger() commonsh = CommonSH('dbuser') class Privategrant(unittest.TestCase): def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------')", "cascade; create table test_alter_default_016(id int unique); select * from test_alter_default_016;", "tables from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for", "if exists test_alter_default_016 cascade; create table test_alter_default_016(id type016); select *", "at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS IS\"", "from test_alter_default_016; ALTER DEFAULT PRIVILEGES for role {self.username} in schema", "select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES", "\"\"\" \"\"\" Case Type : 功能测试 Case Name : 初始用户和sysadmin自己alter自己权限", "conditions of the Mulan PSL v2. You may obtain a", "import Constant from testcase.utils.CommonSH import CommonSH logger = Logger() commonsh", "2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境", "PSL v2. 
You can use this software according to the", "functions to default016_01 WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES", "select * from test_alter_default_016; ALTER DEFAULT PRIVILEGES for role default016_01", "test_alter_default_016; ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016", "table if exists test_alter_default_016 cascade; drop type if exists type016", "sql_cmd = (f''' drop schema if exists schema_016 cascade; create", "drop table if exists test_alter_default_016 cascade; create table test_alter_default_016(id type016);", "(c) 2022 Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan", "ALL PRIVILEGES on functions to default016_01 WITH GRANT OPTION ;", "ALTER DEFAULT PRIVILEGES for role default016_01 GRANT USAGE on TYPES", "WITH GRANT OPTION ; drop schema if exists schema_016 cascade;", "macro from yat.test import Node sys.path.append(sys.path[0] + \"/../\") from testcase.utils.Logger", "from testcase.utils.Logger import Logger from testcase.utils.Constant import Constant from testcase.utils.CommonSH", "role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01 WITH GRANT", "ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke", "drop schema if exists schema_016 cascade; create schema schema_016; drop", "select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username} WITH GRANT OPTION ; ALTER", "PRIVILEGES for role {self.username} revoke USAGE on TYPES from {self.username}", "role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on functions", "role default016_01 revoke EXECUTE on functions from default016_01 CASCADE CONSTRAINTS;", "OPTION ; ALTER DEFAULT PRIVILEGES for role {self.username} in schema", "ALTER DEFAULT PRIVILEGES for role default016_01 GRANT EXECUTE on functions", "end loop; return b; end; select test_default_016(16); drop type if", "self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) 
logger.info('--------2.3.清理--------') sql_cmd = commonsh.execut_db_sql(f''' drop owned by default016_01", "this software according to the terms and conditions of the", "from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role", "\"\"\" Copyright (c) 2022 Huawei Technologies Co.,Ltd. openGauss is licensed", "PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN", "default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变", "sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd = (f''' drop schema if exists", "from testcase.utils.CommonSH import CommonSH logger = Logger() commonsh = CommonSH('dbuser')", "table test_alter_default_016(id int unique); select * from test_alter_default_016; drop function", "self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------2.3.清理--------') sql_cmd = commonsh.execut_db_sql(f''' drop owned", "ALTER DEFAULT PRIVILEGES for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables", "执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功", "revoke ALL on functions from {self.username} CASCADE CONSTRAINTS ; ALTER", "drop table if exists test_alter_default_016 cascade; drop type if exists", "cascade; create schema schema_016; ALTER DEFAULT PRIVILEGES for role default016_01", "in schema schema_016 revoke ALL on tables from default016_01 CASCADE", "Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON", "schema_016 revoke ALL on functions from default016_01 CASCADE CONSTRAINTS ;", "exists schema_016 cascade; ''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql", "drop type if exists type016; CREATE TYPE type016 AS (c_int", "= self.userNode.ssh_user # 
初始用户密码 self.password = <PASSWORD> def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------')", "cascade; drop schema if exists schema_016 cascade; ''') excute_cmd =", "ALTER DEFAULT PRIVILEGES for role {self.username} GRANT USAGE on TYPES", "default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01 CASCADE CONSTRAINTS; ALTER", "drop owned by default016_01 cascade; drop user if exists default016_01;", "schema_016 GRANT ALL PRIVILEGES on tables to {self.username} WITH GRANT", "-U {self.username} -W {self.password} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg =", "logger.info('--------2.3.清理--------') sql_cmd = commonsh.execut_db_sql(f''' drop owned by default016_01 cascade; drop", "import macro from yat.test import Node sys.path.append(sys.path[0] + \"/../\") from", "by default016_01 cascade; drop user if exists default016_01; create user", "excute_cmd = f''' source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port}", "exists test_alter_default_016 cascade; drop type if exists type016 cascade; drop", "on TYPES from {self.username} CASCADE CONSTRAINTS; ''') excute_cmd = f'''", "more details. 
\"\"\" \"\"\" Case Type : 功能测试 Case Name", "{self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password}", "select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01 WITH GRANT OPTION ; ALTER", "on functions to default016_01 WITH GRANT OPTION ; ALTER DEFAULT", "\"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES,", "= <PASSWORD> def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd = (f''' drop", "ALL on TYPES from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT", "''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd = (f''' drop", "obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS", "if exists test_alter_default_016 cascade; drop type if exists type016 cascade;", "schema_016 GRANT ALL PRIVILEGES on functions to {self.username} WITH GRANT", "table test_alter_default_016(id type016); select * from test_alter_default_016; ALTER DEFAULT PRIVILEGES", "; ALTER DEFAULT PRIVILEGES for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on", "privileges to default016_01; ''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd", "from testcase.utils.Constant import Constant from testcase.utils.CommonSH import CommonSH logger =", "schema_016 GRANT ALL PRIVILEGES on tables to default016_01 WITH GRANT", "{self.username} in schema schema_016 GRANT ALL PRIVILEGES on functions to", "schema schema_016; drop table if exists test_alter_default_016 cascade; create table", "2.3.清理 期望:清理成功 
备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History : \"\"\" import sys import unittest", "ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT", "default016_01 cascade; drop user if exists default016_01; ''') logger.info(sql_cmd) logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016执行结束--------')", "CommonSH('dbuser') class Privategrant(unittest.TestCase): def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode = Node('dbuser') self.DB_ENV_PATH", "CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role {self.username} revoke USAGE", "test_alter_default_016; drop function if exists test_default_016(int) cascade; create or replace", "self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------') sql_cmd = ('''", "{self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD}", "commonsh.execut_db_sql(f''' drop owned by default016_01 cascade; drop user if exists", "if exists default016_01; ''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def tearDown(self): logger.info('----------------------------------清理环境----------------------------------')", "You can use this software according to the terms and", "exists test_alter_default_016 cascade; create table test_alter_default_016(id type016); select * from", "cascade; drop type if exists type016 cascade; drop function if", "WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING", "type if exists type016 cascade; drop function if exists test_default_016(int)", "; ALTER DEFAULT PRIVILEGES for role {self.username} GRANT EXECUTE on", "CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role {self.username} revoke", "Name : 初始用户和sysadmin自己alter自己权限 Description : 
1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变", "\"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------')", "from test_alter_default_016; ALTER DEFAULT PRIVILEGES for role default016_01 in schema", "logger.info('----------------------------------清理环境----------------------------------') sql_cmd = commonsh.execut_db_sql(''' drop owned by default016_01 cascade; drop", "on tables to default016_01 WITH GRANT OPTION ; ALTER DEFAULT", "test_alter_default_016(id int unique); select * from test_alter_default_016; drop function if", "PRIVILEGES on functions to default016_01 WITH GRANT OPTION ; ALTER", "v2 for more details. \"\"\" \"\"\" Case Type : 功能测试", "logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------')", "sys.path.append(sys.path[0] + \"/../\") from testcase.utils.Logger import Logger from testcase.utils.Constant import", "GRANT USAGE on TYPES to default016_01 WITH GRANT OPTION ;", "TYPES to default016_01 WITH GRANT OPTION ; drop schema if", "NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. 
See the", "for role {self.username} revoke EXECUTE on functions from {self.username} CASCADE", "default016_01 in schema schema_016 GRANT ALL PRIVILEGES on tables to", "OPTION ; ALTER DEFAULT PRIVILEGES for role default016_01 GRANT USAGE", "* from test_alter_default_016; ALTER DEFAULT PRIVILEGES for role default016_01 in", "in schema schema_016 revoke ALL on functions from default016_01 CASCADE", "(f''' drop schema if exists schema_016 cascade; create schema schema_016;", "revoke USAGE on TYPES from {self.username} CASCADE CONSTRAINTS; ''') excute_cmd", "openGauss is licensed under Mulan PSL v2. You can use", "on TYPES from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES", "create user default016_01 password '{<PASSWORD>}'; grant all privileges to default016_01;", "the Mulan PSL v2. You may obtain a copy of", "USAGE on TYPES from {self.username} CASCADE CONSTRAINTS; ''') excute_cmd =", "CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016", "= self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------2.3.清理--------') sql_cmd =", "return b; end; select test_default_016(16); drop type if exists type016;", "{self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role {self.username}", "source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W", "PRIVILEGES for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username}", "cascade; create table test_alter_default_016(id type016); select * from test_alter_default_016; ALTER", "default016_01 cascade; drop user if exists default016_01; create user default016_01", "1..a loop b:=b+1; end loop; return b; end; select test_default_016(16);", "schema schema_016; ALTER DEFAULT PRIVILEGES for role {self.username} in schema", "FOR A PARTICULAR 
PURPOSE. See the Mulan PSL v2 for", "role default016_01 GRANT USAGE on TYPES to default016_01 WITH GRANT", "Mulan PSL v2. You can use this software according to", "DEFAULT PRIVILEGES for role {self.username} GRANT EXECUTE on functions to", "OPTION ; ALTER DEFAULT PRIVILEGES for role {self.username} GRANT USAGE", "test_alter_default_016; ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016", "OPTION ; ALTER DEFAULT PRIVILEGES for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE", "Constant from testcase.utils.CommonSH import CommonSH logger = Logger() commonsh =", "self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------') sql_cmd =", "INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR", "期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History : \"\"\" import sys import unittest from", "functions to {self.username} WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES", "role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username} WITH GRANT", "; ALTER DEFAULT PRIVILEGES for role default016_01 revoke EXECUTE on", "sql_cmd) def tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd = commonsh.execut_db_sql(''' drop owned by", "PRIVILEGES for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01", "CommonSH logger = Logger() commonsh = CommonSH('dbuser') class Privategrant(unittest.TestCase): def", "GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role {self.username} GRANT", "to the terms and conditions of the Mulan PSL v2.", "DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke ALL", "text); drop table if exists test_alter_default_016 cascade; create table 
test_alter_default_016(id", "NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR", "; ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016", "default016_01; ''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd =", "\"\"\" Case Type : 功能测试 Case Name : 初始用户和sysadmin自己alter自己权限 Description", "DEFAULT PRIVILEGES for role {self.username} revoke EXECUTE on functions from", "1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试", "for role {self.username} GRANT USAGE on TYPES to {self.username} WITH", "ALTER DEFAULT PRIVILEGES for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables", "; ALTER DEFAULT PRIVILEGES for role default016_01 GRANT USAGE on", "in schema schema_016 GRANT ALL PRIVILEGES on tables to {self.username}", "DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke ALL", "TYPES to {self.username} WITH GRANT OPTION ; drop schema if", "tables to {self.username} WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES", "on functions to {self.username} WITH GRANT OPTION ; ALTER DEFAULT", "<PASSWORD> def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd = (f''' drop schema", "{self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username} WITH GRANT OPTION", "schema schema_016 GRANT ALL PRIVILEGES on functions to default016_01 WITH", "= Node('dbuser') self.DB_ENV_PATH = macro.DB_ENV_PATH self.Constant = Constant() # 初始用户用户名", "test_default_016(int) cascade; drop schema if exists schema_016 cascade; ''') excute_cmd", "details. 
\"\"\" \"\"\" Case Type : 功能测试 Case Name :", "commonsh.execut_db_sql(''' drop owned by default016_01 cascade; drop user if exists", "in schema schema_016 GRANT ALL PRIVILEGES on functions to {self.username}", "CREATE TYPE type016 AS (c_int int,c_text text); drop table if", "备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01", ": 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接", "class Privategrant(unittest.TestCase): def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode = Node('dbuser') self.DB_ENV_PATH =", "role {self.username} GRANT USAGE on TYPES to {self.username} WITH GRANT", "can use this software according to the terms and conditions", "2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History", "msg) logger.info('--------2.3.清理--------') sql_cmd = commonsh.execut_db_sql(f''' drop owned by default016_01 cascade;", "PRIVILEGES for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01", "exists default016_01; create user default016_01 password '{<PASSWORD>}'; grant all privileges", "for role {self.username} in schema schema_016 revoke ALL on TYPES", "{self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg", "drop table if exists test_alter_default_016 cascade; create table test_alter_default_016(id int", "if exists type016; CREATE TYPE type016 AS (c_int int,c_text text);", "BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED,", "for role {self.username} revoke USAGE on 
TYPES from {self.username} CASCADE", "self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------') sql_cmd = (''' drop table", "PARTICULAR PURPOSE. See the Mulan PSL v2 for more details.", "function test_default_016(a int) return int as b int:= a; begin", "on TYPES to {self.username} WITH GRANT OPTION ; drop schema", "select * from test_alter_default_016; drop function if exists test_default_016(int) cascade;", "role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on TYPES", "role {self.username} in schema schema_016 revoke ALL on TYPES from", "GRANT ALL PRIVILEGES on tables to default016_01 WITH GRANT OPTION", "user if exists default016_01; ''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def tearDown(self):", "b; end; select test_default_016(16); drop type if exists type016; CREATE", "default016_01 WITH GRANT OPTION ; drop schema if exists schema_016", "logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd = (f''' drop schema if exists schema_016", "role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on TYPES", "PRIVILEGES for role {self.username} GRANT EXECUTE on functions to {self.username}", "DEFAULT PRIVILEGES for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from", "= macro.DB_ENV_PATH self.Constant = Constant() # 初始用户用户名 self.username = self.userNode.ssh_user", "role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on tables", "default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role default016_01 in", "ALL PRIVILEGES on TYPES to default016_01 WITH GRANT OPTION ;", "PRIVILEGES for role default016_01 GRANT EXECUTE on functions to default016_01", "2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History :", "self.userNode 
= Node('dbuser') self.DB_ENV_PATH = macro.DB_ENV_PATH self.Constant = Constant() #", "role {self.username} in schema schema_016 revoke ALL on tables from", "EXECUTE on functions to {self.username} WITH GRANT OPTION ; ALTER", "revoke ALL on TYPES from default016_01 CASCADE CONSTRAINTS ; ALTER", "TYPES from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for", "default016_01 revoke USAGE on TYPES from default016_01 CASCADE CONSTRAINTS; ''')", "functions from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role", "{self.userNode.db_port} -U {self.username} -W {self.password} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg", "in schema schema_016 GRANT ALL PRIVILEGES on tables to default016_01", "cascade; create schema schema_016; drop table if exists test_alter_default_016 cascade;", "; ALTER DEFAULT PRIVILEGES for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on", "; ALTER DEFAULT PRIVILEGES for role {self.username} revoke EXECUTE on", "{self.username} revoke EXECUTE on functions from {self.username} CASCADE CONSTRAINTS; ALTER", "if exists schema_016 cascade; create schema schema_016; drop table if", "DEFAULT PRIVILEGES for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from", "for role default016_01 revoke USAGE on TYPES from default016_01 CASCADE", "cascade; drop user if exists default016_01; create user default016_01 password", "GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role {self.username} in", "ALTER DEFAULT PRIVILEGES for role {self.username} GRANT EXECUTE on functions", "on TYPES to default016_01 WITH GRANT OPTION ; drop schema", "Logger from testcase.utils.Constant import Constant from testcase.utils.CommonSH import CommonSH logger", "schema schema_016 revoke ALL on tables from default016_01 CASCADE CONSTRAINTS", "in schema schema_016 revoke ALL on tables from {self.username} CASCADE", "on tables from {self.username} CASCADE CONSTRAINTS ; ALTER 
DEFAULT PRIVILEGES", "tables from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role", "use this software according to the terms and conditions of", "all privileges to default016_01; ''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------')", "2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect", "int as b int:= a; begin for i in 1..a", "ALTER DEFAULT PRIVILEGES for role {self.username} revoke EXECUTE on functions", "default016_01 in schema schema_016 revoke ALL on tables from default016_01", "logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd = commonsh.execut_db_sql(''' drop", "logger = Logger() commonsh = CommonSH('dbuser') class Privategrant(unittest.TestCase): def setUp(self):", "PRIVILEGES for role default016_01 revoke EXECUTE on functions from default016_01", "by default016_01 cascade; drop user if exists default016_01; ''') logger.info(sql_cmd)", "logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------2.3.清理--------')", "v2. 
You may obtain a copy of Mulan PSL v2", "i in 1..a loop b:=b+1; end loop; return b; end;", "; drop schema if exists schema_016 cascade; create schema schema_016;", "PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES", "\"\"\" import sys import unittest from yat.test import macro from", "{self.username} -W {self.password} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result()", "from {self.username} CASCADE CONSTRAINTS; ''') excute_cmd = f''' source {self.DB_ENV_PATH};", "if exists test_default_016(int) cascade; create or replace function test_default_016(a int)", "CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role default016_01 revoke USAGE", "sql_cmd = commonsh.execut_db_sql(f''' drop owned by default016_01 cascade; drop user", "# 初始用户用户名 self.username = self.userNode.ssh_user # 初始用户密码 self.password = <PASSWORD>", "return int as b int:= a; begin for i in", "期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 Expect : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限", "= commonsh.execut_db_sql(f''' drop owned by default016_01 cascade; drop user if", "import unittest from yat.test import macro from yat.test import Node", "DEFAULT PRIVILEGES for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to", "Copyright (c) 2022 Huawei Technologies Co.,Ltd. 
openGauss is licensed under", "test_default_016(a int) return int as b int:= a; begin for", "exists default016_01; ''') logger.info(sql_cmd) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd) def tearDown(self): logger.info('----------------------------------清理环境----------------------------------') sql_cmd", "schema schema_016 revoke ALL on tables from {self.username} CASCADE CONSTRAINTS", "CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role {self.username} in schema", "for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01 WITH", "default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History : \"\"\"", "for role {self.username} in schema schema_016 revoke ALL on functions", "exists test_default_016(int) cascade; create or replace function test_default_016(a int) return", "user if exists default016_01; create user default016_01 password '{<PASSWORD>}'; grant", "ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT", "self.password = <PASSWORD> def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd = (f'''", "for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username} CASCADE", "; ALTER DEFAULT PRIVILEGES for role {self.username} revoke USAGE on", "or replace function test_default_016(a int) return int as b int:=", "table if exists test_alter_default_016 cascade; create table test_alter_default_016(id type016); select", "may obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2", "WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role default016_01", "= commonsh.execut_db_sql(''' drop owned by default016_01 cascade; drop user if", "on functions from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES", "from default016_01 CASCADE CONSTRAINTS; ''') excute_cmd 
= f''' source {self.DB_ENV_PATH};", "-U default016_01 -W {macro.COMMON_PASSWD} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg =", "a; begin for i in 1..a loop b:=b+1; end loop;", "初始用户和sysadmin自己alter自己权限 Description : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01", "期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试", "ALL PRIVILEGES on tables to default016_01 WITH GRANT OPTION ;", "to default016_01 WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES for", "copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS", "2022 Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL", "and conditions of the Mulan PSL v2. You may obtain", "1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功", "msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01 --------')", "PRIVILEGES on TYPES to {self.username} WITH GRANT OPTION ; ALTER", "logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd = (f''' drop schema if exists schema_016 cascade;", "logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg) self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg) logger.info('--------2.sysadmin用户alter自己的权限--------') logger.info('--------2.1.管理员用户连接创建sysadmin用户 default016_01", "GRANT ALL PRIVILEGES on functions to default016_01 WITH GRANT OPTION", "= (''' drop table if exists test_alter_default_016 cascade; drop type", "if exists schema_016 cascade; create schema schema_016; ALTER DEFAULT PRIVILEGES", "role default016_01 in 
schema schema_016 revoke ALL on TYPES from", "role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username} CASCADE CONSTRAINTS;", "create schema schema_016; drop table if exists test_alter_default_016 cascade; create", "v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS", "PRIVILEGES for role {self.username} GRANT USAGE on TYPES to {self.username}", "password '{<PASSWORD>}'; grant all privileges to default016_01; ''') logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG,", "in schema schema_016 GRANT ALL PRIVILEGES on TYPES to default016_01", "role default016_01 in schema schema_016 revoke ALL on tables from", "user default016_01 password '{<PASSWORD>}'; grant all privileges to default016_01; ''')", "WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT", "default016_01 --------') sql_cmd = commonsh.execut_db_sql(f''' drop owned by default016_01 cascade;", "{self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username} CASCADE CONSTRAINTS; ALTER", "tables from {self.username} CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES for role", "import CommonSH logger = Logger() commonsh = CommonSH('dbuser') class Privategrant(unittest.TestCase):", "schema if exists schema_016 cascade; create schema schema_016; ALTER DEFAULT", "default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01 WITH GRANT OPTION", "初始用户密码 self.password = <PASSWORD> def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------') sql_cmd =", "self.username = self.userNode.ssh_user # 初始用户密码 self.password = <PASSWORD> def test_common_user_permission(self):", "in schema schema_016 GRANT ALL PRIVILEGES on TYPES to {self.username}", "sql_cmd = commonsh.execut_db_sql(''' drop owned by default016_01 cascade; drop user", "CONSTRAINTS ; 
ALTER DEFAULT PRIVILEGES for role default016_01 revoke EXECUTE", "ALL on tables from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT", "{self.username} in schema schema_016 GRANT ALL PRIVILEGES on TYPES to", "revoke ALL on tables from default016_01 CASCADE CONSTRAINTS ; ALTER", "-d {self.userNode.db_name} -p {self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD} -c \"{sql_cmd}\"", "role default016_01 GRANT EXECUTE on functions to default016_01 WITH GRANT", "f''' source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username}", "of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED", "ALTER DEFAULT PRIVILEGES for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables", "schema_016 revoke ALL on TYPES from default016_01 CASCADE CONSTRAINTS ;", "http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS,", "if exists test_default_016(int) cascade; drop schema if exists schema_016 cascade;", "exists schema_016 cascade; create schema schema_016; drop table if exists", "Constant() # 初始用户用户名 self.username = self.userNode.ssh_user # 初始用户密码 self.password =", "schema schema_016 GRANT ALL PRIVILEGES on TYPES to {self.username} WITH", "PRIVILEGES for role {self.username} revoke EXECUTE on functions from {self.username}", "if exists schema_016 cascade; ''') excute_cmd = f''' source {self.DB_ENV_PATH};", "OPTION ; drop schema if exists schema_016 cascade; create schema", "test_alter_default_016(id type016); select * from test_alter_default_016; ALTER DEFAULT PRIVILEGES for", "msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------1.2.清理环境--------') sql_cmd = (''' drop table if", "GRANT OPTION ; drop schema if exists schema_016 cascade; create", "Co.,Ltd. openGauss is licensed under Mulan PSL v2. 
You can", "\"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS", "exists schema_016 cascade; create schema schema_016; ALTER DEFAULT PRIVILEGES for", "schema_016; drop table if exists test_alter_default_016 cascade; create table test_alter_default_016(id", "role {self.username} revoke USAGE on TYPES from {self.username} CASCADE CONSTRAINTS;", "IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT", "CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for role {self.username} revoke EXECUTE", "source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U default016_01 -W", "功能测试 Case Name : 初始用户和sysadmin自己alter自己权限 Description : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境", "LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.", "ALTER DEFAULT PRIVILEGES for role default016_01 revoke EXECUTE on functions", "-W {macro.COMMON_PASSWD} -c \"{sql_cmd}\" ''' logger.info(excute_cmd) msg = self.userNode.sh(excute_cmd).result() logger.info(msg)", "to default016_01 WITH GRANT OPTION ; drop schema if exists", "revoke ALL on functions from default016_01 CASCADE CONSTRAINTS ; ALTER", "msg) self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg) logger.info('--------2.3.清理--------') sql_cmd = commonsh.execut_db_sql(f''' drop owned by", "as b int:= a; begin for i in 1..a loop", "schema_016 GRANT ALL PRIVILEGES on functions to default016_01 WITH GRANT", "tables from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for", "self.userNode.ssh_user # 初始用户密码 self.password = <PASSWORD> def test_common_user_permission(self): logger.info('--------1.初始用户alter自己的权限--------') logger.info('--------1.1.初始用户alter自己的权限--------')", "tables to default016_01 WITH GRANT OPTION ; ALTER DEFAULT PRIVILEGES", "of the Mulan PSL v2. You may obtain a copy", "Mulan PSL v2. 
You may obtain a copy of Mulan", "2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History : \"\"\" import sys", "commonsh = CommonSH('dbuser') class Privategrant(unittest.TestCase): def setUp(self): logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016开始执行--------') self.userNode =", "is licensed under Mulan PSL v2. You can use this", "select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01 CASCADE CONSTRAINTS; ALTER DEFAULT PRIVILEGES", "for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username} WITH", "the terms and conditions of the Mulan PSL v2. You", "{self.username} WITH GRANT OPTION ; drop schema if exists schema_016", "functions from default016_01 CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES for", "GRANT OPTION ; ALTER DEFAULT PRIVILEGES for role default016_01 in", "logger.info(sql_cmd) self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd) logger.info('--------2.2.default016_01用户连接 执行alter测试--------') sql_cmd = (f''' drop schema", "PRIVILEGES on functions to {self.username} WITH GRANT OPTION ; ALTER", "GRANT EXECUTE on functions to default016_01 WITH GRANT OPTION ;", "{self.username} in schema schema_016 revoke ALL on tables from {self.username}", "for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01 CASCADE", "History : \"\"\" import sys import unittest from yat.test import", "Type : 功能测试 Case Name : 初始用户和sysadmin自己alter自己权限 Description : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变", "default016_01 in schema schema_016 revoke ALL on functions from default016_01", "create table test_alter_default_016(id int unique); select * from test_alter_default_016; drop", "cascade; ''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name}", "See the Mulan PSL v2 for more details. 
\"\"\" \"\"\"", "default016_01 CASCADE CONSTRAINTS; ''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql", "in schema schema_016 GRANT ALL PRIVILEGES on functions to default016_01", "schema_016 cascade; ''') excute_cmd = f''' source {self.DB_ENV_PATH}; gsql -d", "GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username} WITH GRANT OPTION ;", "OPTION ; ALTER DEFAULT PRIVILEGES for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE", "GRANT USAGE on TYPES to {self.username} WITH GRANT OPTION ;", "for more details. \"\"\" \"\"\" Case Type : 功能测试 Case", "Description : 1.初始用户alter自己的权限:alter不报错,但不生效,查询权限不变 1.1.初始用户alter自己的权限 1.2.清理环境 期望:清理成功 2.sysadmin用户alter自己的权限:alter不报错,但不生效,查询权限不变 2.1.管理员用户连接创建sysadmin用户 default016_01 期望:创建成功", "期望:创建成功 2.2.default016_016用户连接 执行alter测试 2.3.清理 期望:清理成功 备注:以上alter测试包括对表(包含视图),类型,函数的权限测试 History : \"\"\" import", "on functions from {self.username} CASCADE CONSTRAINTS ; ALTER DEFAULT PRIVILEGES", "OPTION ; ALTER DEFAULT PRIVILEGES for role default016_01 GRANT EXECUTE", "f''' source {self.DB_ENV_PATH}; gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U default016_01", "= (f''' drop schema if exists schema_016 cascade; create schema", "from test_alter_default_016; drop function if exists test_default_016(int) cascade; create or", "ALL PRIVILEGES on tables to {self.username} WITH GRANT OPTION ;" ]
[ "detectedclosed else 'false' } } if domainname is not None:", "Shadows API. :param view: InfrastructureView :return: Infrastructure generator \"\"\" if", "severities is None else severities, 'alerted': 'true' if alerted else", ".ds_base_service import DSBaseService from .ds_find_service import DSFindService from ..model.infrastructure import", "from .ds_find_service import DSFindService from ..model.infrastructure import Infrastructure class InfrastructureService(DSFindService):", "'true' if markedclosed else 'false', 'detectedClosed': 'true' if detectedclosed else", "alerted=False, reverse=None): view = { 'filter': { 'detectedOpen': detectedopen, 'severities':", "if markedclosed else 'false', 'detectedClosed': 'true' if detectedclosed else 'false'", "File: infrastructure_service.py # # Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) #", "markedclosed=False, severities=None, alerted=False, reverse=None): view = { 'filter': { 'detectedOpen':", "'markedClosed': 'true' if markedclosed else 'false', 'detectedClosed': 'true' if detectedclosed", "def infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False, severities=None, alerted=False, reverse=None): view =", "import DSFindService from ..model.infrastructure import Infrastructure class InfrastructureService(DSFindService): def __init__(self,", "if severities is None else severities, 'alerted': 'true' if alerted", "if domainname is not None: view['filter']['domainName'] = domainname if reverse", "view = Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports', view, Infrastructure) @staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published')", "in page groups. 
:param view: InfrastructureView :return: Infrastructure generator \"\"\"", "= Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports', view, Infrastructure) @staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def", "not None: view['filter']['domainName'] = domainname if reverse is not None:", "domainname is not None: view['filter']['domainName'] = domainname if reverse is", "Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports', view, Infrastructure) @staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def infrastructure_view(detectedopen='ALL',", "if detectedclosed else 'false' } } if domainname is not", "view is None: view = Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports', view, Infrastructure)", "Infrastructure) def find_all_pages(self, view=None): \"\"\" Streams all infrastructure objects retrieved", "# # Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) # from .ds_base_service", "ds_api_secret_key, proxy=None): super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy) def find_all(self, view=None): \"\"\"", "DSBaseService from .ds_find_service import DSFindService from ..model.infrastructure import Infrastructure class", "2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) # from .ds_base_service import DSBaseService from .ds_find_service import", "..model.infrastructure import Infrastructure class InfrastructureService(DSFindService): def __init__(self, ds_api_key, ds_api_secret_key, proxy=None):", "None: view['filter']['domainName'] = domainname if reverse is not None: view['sort']", "retrieved from the Digital Shadows API in page groups. :param", "groups. 
:param view: InfrastructureView :return: Infrastructure generator \"\"\" if view", "(https://www.apache.org/licenses/LICENSE-2.0.txt) # from .ds_base_service import DSBaseService from .ds_find_service import DSFindService", "under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) # from .ds_base_service import DSBaseService from", "None else severities, 'alerted': 'true' if alerted else 'false', 'markedClosed':", "infrastructure objects retrieved from the Digital Shadows API in page", "detectedopen, 'severities': [] if severities is None else severities, 'alerted':", "detectedclosed=False, markedclosed=False, severities=None, alerted=False, reverse=None): view = { 'filter': {", "markedclosed else 'false', 'detectedClosed': 'true' if detectedclosed else 'false' }", "Streams all infrastructure objects retrieved from the Digital Shadows API", "if view is None: view = Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports', view,", "= InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports', view, Infrastructure) def find_all_pages(self, view=None): \"\"\"", "# File: infrastructure_service.py # # Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)", "find_all_pages(self, view=None): \"\"\" Streams all infrastructure objects retrieved from the", "view is None: view = InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports', view, Infrastructure)", "import Infrastructure class InfrastructureService(DSFindService): def __init__(self, ds_api_key, ds_api_secret_key, proxy=None): super(InfrastructureService,", "{ 'direction': 'ASCENDING' if reverse else 'DESCENDING', 'property': 'published' }", "__init__(self, ds_api_key, ds_api_secret_key, proxy=None): super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy) def find_all(self,", "proxy=None): super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy) def 
find_all(self, view=None): \"\"\" Streams", "the Digital Shadows API in page groups. :param view: InfrastructureView", "retrieved from the Digital Shadows API. :param view: InfrastructureView :return:", "'false', 'detectedClosed': 'true' if detectedclosed else 'false' } } if", "} } if domainname is not None: view['filter']['domainName'] = domainname", "class InfrastructureService(DSFindService): def __init__(self, ds_api_key, ds_api_secret_key, proxy=None): super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key,", "'false' } } if domainname is not None: view['filter']['domainName'] =", "from .ds_base_service import DSBaseService from .ds_find_service import DSFindService from ..model.infrastructure", "objects retrieved from the Digital Shadows API. :param view: InfrastructureView", "Shadows API in page groups. :param view: InfrastructureView :return: Infrastructure", "not None: view['sort'] = { 'direction': 'ASCENDING' if reverse else", "Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) # from .ds_base_service import DSBaseService", "ds_api_secret_key, proxy=proxy) def find_all(self, view=None): \"\"\" Streams all infrastructure objects", "if reverse is not None: view['sort'] = { 'direction': 'ASCENDING'", "view['filter']['domainName'] = domainname if reverse is not None: view['sort'] =", ".ds_find_service import DSFindService from ..model.infrastructure import Infrastructure class InfrastructureService(DSFindService): def", "# from .ds_base_service import DSBaseService from .ds_find_service import DSFindService from", "'false', 'markedClosed': 'true' if markedclosed else 'false', 'detectedClosed': 'true' if", "from the Digital Shadows API in page groups. 
:param view:", "objects retrieved from the Digital Shadows API in page groups.", "Infrastructure generator \"\"\" if view is None: view = Infrastructure.infrastructure_view()", "super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy) def find_all(self, view=None): \"\"\" Streams all", "view, Infrastructure) def find_all_pages(self, view=None): \"\"\" Streams all infrastructure objects", "= { 'filter': { 'detectedOpen': detectedopen, 'severities': [] if severities", "Streams all infrastructure objects retrieved from the Digital Shadows API.", "severities=None, alerted=False, reverse=None): view = { 'filter': { 'detectedOpen': detectedopen,", "infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False, severities=None, alerted=False, reverse=None): view = {", "view, Infrastructure) @staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False,", "'alerted': 'true' if alerted else 'false', 'markedClosed': 'true' if markedclosed", "= domainname if reverse is not None: view['sort'] = {", "return self._find_all('/api/ip-ports', view, Infrastructure) def find_all_pages(self, view=None): \"\"\" Streams all", "return self._find_all_pages('/api/ip-ports', view, Infrastructure) @staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def infrastructure_view(detectedopen='ALL', domainname=None,", ":param view: InfrastructureView :return: Infrastructure generator \"\"\" if view is", "from the Digital Shadows API. :param view: InfrastructureView :return: Infrastructure", "proxy=proxy) def find_all(self, view=None): \"\"\" Streams all infrastructure objects retrieved", "self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy) def find_all(self, view=None): \"\"\" Streams all infrastructure", "API. 
:param view: InfrastructureView :return: Infrastructure generator \"\"\" if view", "Infrastructure generator \"\"\" if view is None: view = InfrastructureService.infrastructure_view()", "the Digital Shadows API. :param view: InfrastructureView :return: Infrastructure generator", "view = { 'filter': { 'detectedOpen': detectedopen, 'severities': [] if", "'severities': [] if severities is None else severities, 'alerted': 'true'", "'detectedClosed': 'true' if detectedclosed else 'false' } } if domainname", "= { 'direction': 'ASCENDING' if reverse else 'DESCENDING', 'property': 'published'", "self._find_all('/api/ip-ports', view, Infrastructure) def find_all_pages(self, view=None): \"\"\" Streams all infrastructure", "infrastructure objects retrieved from the Digital Shadows API. :param view:", "is not None: view['filter']['domainName'] = domainname if reverse is not", "domainname if reverse is not None: view['sort'] = { 'direction':", "else 'false', 'markedClosed': 'true' if markedclosed else 'false', 'detectedClosed': 'true'", "view=None): \"\"\" Streams all infrastructure objects retrieved from the Digital", "InfrastructureService(DSFindService): def __init__(self, ds_api_key, ds_api_secret_key, proxy=None): super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy)", "@staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False, severities=None, alerted=False,", "else severities, 'alerted': 'true' if alerted else 'false', 'markedClosed': 'true'", "Digital Shadows API in page groups. 
:param view: InfrastructureView :return:", "\"\"\" Streams all infrastructure objects retrieved from the Digital Shadows", "\"\"\" if view is None: view = Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports',", "def find_all(self, view=None): \"\"\" Streams all infrastructure objects retrieved from", "} if domainname is not None: view['filter']['domainName'] = domainname if", "view: InfrastructureView :return: Infrastructure generator \"\"\" if view is None:", "if alerted else 'false', 'markedClosed': 'true' if markedclosed else 'false',", "reverse=None): view = { 'filter': { 'detectedOpen': detectedopen, 'severities': []", "def find_all_pages(self, view=None): \"\"\" Streams all infrastructure objects retrieved from", "is None else severities, 'alerted': 'true' if alerted else 'false',", "from ..model.infrastructure import Infrastructure class InfrastructureService(DSFindService): def __init__(self, ds_api_key, ds_api_secret_key,", ":return: Infrastructure generator \"\"\" if view is None: view =", "is None: view = InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports', view, Infrastructure) def", "# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) # from .ds_base_service import", "is not None: view['sort'] = { 'direction': 'ASCENDING' if reverse", "if view is None: view = InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports', view,", "Infrastructure class InfrastructureService(DSFindService): def __init__(self, ds_api_key, ds_api_secret_key, proxy=None): super(InfrastructureService, self).__init__(ds_api_key,", "None: view = InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports', view, Infrastructure) def find_all_pages(self,", "InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports', view, Infrastructure) def find_all_pages(self, view=None): \"\"\" Streams", "page groups. 
:param view: InfrastructureView :return: Infrastructure generator \"\"\" if", "'detectedOpen': detectedopen, 'severities': [] if severities is None else severities,", "ds_api_key, ds_api_secret_key, proxy=None): super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy) def find_all(self, view=None):", "'true' if detectedclosed else 'false' } } if domainname is", "all infrastructure objects retrieved from the Digital Shadows API in", "def __init__(self, ds_api_key, ds_api_secret_key, proxy=None): super(InfrastructureService, self).__init__(ds_api_key, ds_api_secret_key, proxy=proxy) def", "Digital Shadows API. :param view: InfrastructureView :return: Infrastructure generator \"\"\"", "InfrastructureView :return: Infrastructure generator \"\"\" if view is None: view", "\"\"\" if view is None: view = InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports',", "@DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False, severities=None, alerted=False, reverse=None):", "self._find_all_pages('/api/ip-ports', view, Infrastructure) @staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False,", "else 'false' } } if domainname is not None: view['filter']['domainName']", "@DSBaseService.sorted('published') def infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False, severities=None, alerted=False, reverse=None): view", "DSFindService from ..model.infrastructure import Infrastructure class InfrastructureService(DSFindService): def __init__(self, ds_api_key,", "None: view['sort'] = { 'direction': 'ASCENDING' if reverse else 'DESCENDING',", "Infrastructure) @staticmethod @DSBaseService.paginated(size=500) @DSBaseService.sorted('published') def 
infrastructure_view(detectedopen='ALL', domainname=None, detectedclosed=False, markedclosed=False, severities=None,", "else 'false', 'detectedClosed': 'true' if detectedclosed else 'false' } }", "severities, 'alerted': 'true' if alerted else 'false', 'markedClosed': 'true' if", "API in page groups. :param view: InfrastructureView :return: Infrastructure generator", "generator \"\"\" if view is None: view = InfrastructureService.infrastructure_view() return", "'filter': { 'detectedOpen': detectedopen, 'severities': [] if severities is None", "[] if severities is None else severities, 'alerted': 'true' if", "find_all(self, view=None): \"\"\" Streams all infrastructure objects retrieved from the", "None: view = Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports', view, Infrastructure) @staticmethod @DSBaseService.paginated(size=500)", "domainname=None, detectedclosed=False, markedclosed=False, severities=None, alerted=False, reverse=None): view = { 'filter':", "'true' if alerted else 'false', 'markedClosed': 'true' if markedclosed else", "is None: view = Infrastructure.infrastructure_view() return self._find_all_pages('/api/ip-ports', view, Infrastructure) @staticmethod", "{ 'filter': { 'detectedOpen': detectedopen, 'severities': [] if severities is", "import DSBaseService from .ds_find_service import DSFindService from ..model.infrastructure import Infrastructure", "view['sort'] = { 'direction': 'ASCENDING' if reverse else 'DESCENDING', 'property':", "alerted else 'false', 'markedClosed': 'true' if markedclosed else 'false', 'detectedClosed':", "{ 'detectedOpen': detectedopen, 'severities': [] if severities is None else", "'ASCENDING' if reverse else 'DESCENDING', 'property': 'published' } return view", "all infrastructure objects retrieved from the Digital Shadows API. 
:param", "infrastructure_service.py # # Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) # from", "'direction': 'ASCENDING' if reverse else 'DESCENDING', 'property': 'published' } return", "generator \"\"\" if view is None: view = Infrastructure.infrastructure_view() return", "view = InfrastructureService.infrastructure_view() return self._find_all('/api/ip-ports', view, Infrastructure) def find_all_pages(self, view=None):", "Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt) # from .ds_base_service import DSBaseService from .ds_find_service", "reverse is not None: view['sort'] = { 'direction': 'ASCENDING' if" ]
[ "os import sys import tempfile import yaml import zipfile import", "protein_accession: str = \"\" def get_fields(self): return [self.feat_type, self.name, self.range_start,", "from google.protobuf import json_format from ncbi.datasets.v1alpha1 import dataset_catalog_pb2 from ncbi.datasets.v1alpha1.reports", "return fname return None @dataclass class Gene: id: str feat_type:", "run(self): for assm_acc, seq_acc, start, stop, *extra in self.read_data(): self.find_all_for_location(assm_acc,", "else self.name def find_genes_by_loc(gff3_db, csvout, assm_acc, seq_acc, start, stop, extra_fields):", "field from enum import Enum, unique, auto import os import", "str = \"\" def get_fields(self): return [self.feat_type, self.name, self.range_start, self.range_stop,", "return self.protein_accession if self.protein_accession else self.name def find_genes_by_loc(gff3_db, csvout, assm_acc,", "prot_acc, ) csvout.writerow([assm_acc, seq_acc, start, stop, *extra_fields, *geneobj.get_fields()]) found_genes.append(geneobj) return", "seq_acc, start, stop, *extra in self.read_data(): self.find_all_for_location(assm_acc, seq_acc, start, stop,", "= None): files = defaultdict(list) for assm in catalog.assemblies: acc", "import dataset_catalog_pb2 from ncbi.datasets.v1alpha1.reports import assembly_pb2 from ncbi.datasets.reports.report_reader import DatasetsReportReader", "in gff_files: self.process_loc_for_gff(zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields) except", "help=f'root of input data directory [{self.default_packages_dir}]') parser.add_argument('--locs', type=str, help='file containing", "featuretype=feat_types, completely_within=False): gene_name = gene.attributes.get('Name', None)[0] prot_acc = \"\" if", "retrieve_data_catalog(zin) gff_files = get_catalog_files(catalog, dataset_catalog_pb2.File.FileType.GFF3) for assm_acc, gff_files in gff_files.items():", "self.protein_accession if self.protein_accession else self.name def find_genes_by_loc(gff3_db, csvout, 
assm_acc, seq_acc,", "def get_zip_file_for_acc(acc, path): fname = os.path.join(path, f'{acc}.zip') if os.path.isfile(fname): return", "name_val(self): return self.protein_accession if self.protein_accession else self.name def find_genes_by_loc(gff3_db, csvout,", "import sys import tempfile import yaml import zipfile import gffutils", "stop, extra_fields): found_genes = [] feat_types = ('gene', 'pseudogene') for", "for path in report_files: yaml = zip_in.read(path) rpt_rdr = DatasetsReportReader()", "assm_acc) for path in report_files: yaml = zip_in.read(path) rpt_rdr =", "filepaths def get_catalog_files(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str = None):", "continue for f in assm.files: filepath = os.path.join('ncbi_dataset', 'data', f.file_path)", "assm_acc, gff_files in gff_files.items(): report = retrieve_assembly_report(zin, catalog, assm_acc) for", "== 'protein_coding': cds = list(gff3_db.children(gene, featuretype='CDS')) prot_acc = cds[0].attributes.get('protein_id', None)[0]", "Enum, unique, auto import os import sys import tempfile import", "locations') self.args = parser.parse_args() self.writer = csv.writer(sys.stdout, dialect='excel-tab') def read_data(self):", "tmpfile: tmpfile.write(zin.read(gff_fname)) db = gffutils.create_db( tmpfile.name, dbfn=':memory:', force=True, keep_order=True, merge_strategy='merge',", "str): report_files = get_catalog_files(catalog, desired_filetype, assm_acc) filepaths = [] for", "default_packages_dir = os.path.join('var', 'data', 'packages') def __init__(self): parser = argparse.ArgumentParser()", "None)[0] geneobj = Gene( gene.id, gene.featuretype, gene_name, gene.chrom, gene.strand, gene.start,", "cds[0].attributes.get('protein_id', None)[0] geneobj = Gene( gene.id, gene.featuretype, gene_name, gene.chrom, gene.strand,", "class Gene: id: str feat_type: str name: str chrom: str", "= gffutils.create_db( tmpfile.name, dbfn=':memory:', force=True, 
keep_order=True, merge_strategy='merge', sort_attribute_values=True ) find_genes_by_loc(db,", "zipfile.ZipFile(zip_file, 'r') as zin: catalog = retrieve_data_catalog(zin) gff_files = get_catalog_files(catalog,", "stop, extra_fields): zip_file = get_zip_file_for_acc(assm_acc, self.args.packages_dir) try: with zipfile.ZipFile(zip_file, 'r')", "assm_acc != acc: continue for f in assm.files: filepath =", "def run(self): for assm_acc, seq_acc, start, stop, *extra in self.read_data():", "None)[0] prot_acc = \"\" if gene.attributes['gene_biotype'][0] == 'protein_coding': cds =", "prot_acc = cds[0].attributes.get('protein_id', None)[0] geneobj = Gene( gene.id, gene.featuretype, gene_name,", "start, stop, *extra_fields, *geneobj.get_fields()]) found_genes.append(geneobj) return found_genes class FindGenesByLoc: default_packages_dir", "[] for assm_acc, paths in report_files.items(): filepaths.extend(paths) return filepaths def", "for f in assm.files: filepath = os.path.join('ncbi_dataset', 'data', f.file_path) if", "db = gffutils.create_db( tmpfile.name, dbfn=':memory:', force=True, keep_order=True, merge_strategy='merge', sort_attribute_values=True )", "paths in report_files.items(): filepaths.extend(paths) return filepaths def get_catalog_files(catalog: dataset_catalog_pb2.Catalog, desired_filetype:", "None): files = defaultdict(list) for assm in catalog.assemblies: acc =", "report = retrieve_assembly_report(zin, catalog, assm_acc) for gff_fname in gff_files: self.process_loc_for_gff(zin,", "unique, auto import os import sys import tempfile import yaml", "as zin: catalog = retrieve_data_catalog(zin) gff_files = get_catalog_files(catalog, dataset_catalog_pb2.File.FileType.GFF3) for", "@dataclass class Gene: id: str feat_type: str name: str chrom:", "'packages') def __init__(self): parser = argparse.ArgumentParser() parser.add_argument('--packages-dir', type=str, default=self.default_packages_dir, help=f'root", "= get_catalog_files(catalog, desired_filetype, assm_acc) 
filepaths = [] for assm_acc, paths", "dialect='excel-tab') def read_data(self): for row in csv.reader(iter(sys.stdin.readline, ''), dialect='excel-tab'): yield", "gene.chrom, gene.strand, gene.start, gene.stop, prot_acc, ) csvout.writerow([assm_acc, seq_acc, start, stop,", "dataset_catalog_pb2.File.FileType.GFF3) for assm_acc, gff_files in gff_files.items(): report = retrieve_assembly_report(zin, catalog,", "= \"\" if gene.attributes['gene_biotype'][0] == 'protein_coding': cds = list(gff3_db.children(gene, featuretype='CDS'))", "None @dataclass class Gene: id: str feat_type: str name: str", "'data', 'packages') def __init__(self): parser = argparse.ArgumentParser() parser.add_argument('--packages-dir', type=str, default=self.default_packages_dir,", "Gene: id: str feat_type: str name: str chrom: str strand:", "str strand: str range_start: int range_stop: int protein_accession: str =", "sort_attribute_values=True ) find_genes_by_loc(db, self.writer, assm_acc, seq_acc, start, stop, extra_fields) def", "in gff_files.items(): report = retrieve_assembly_report(zin, catalog, assm_acc) for gff_fname in", "import defaultdict import csv from dataclasses import dataclass, field from", "gff3_db.region(seqid=seq_acc, start=start, end=stop, featuretype=feat_types, completely_within=False): gene_name = gene.attributes.get('Name', None)[0] prot_acc", "of input data directory [{self.default_packages_dir}]') parser.add_argument('--locs', type=str, help='file containing genomic", "stop, *extra in self.read_data(): self.find_all_for_location(assm_acc, seq_acc, start, stop, extra) def", "int range_stop: int protein_accession: str = \"\" def get_fields(self): return", "keep_order=True, merge_strategy='merge', sort_attribute_values=True ) find_genes_by_loc(db, self.writer, assm_acc, seq_acc, start, stop,", "gff_fname in gff_files: self.process_loc_for_gff(zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields)", "report_files: yaml = zip_in.read(path) rpt_rdr = 
def retrieve_data_catalog(zip_in) -> dataset_catalog_pb2.Catalog:
    """Parse the package's dataset_catalog.json into a Catalog protobuf.

    Args:
        zip_in: open `zipfile.ZipFile` of a datasets package.
    """
    raw_json = zip_in.read('ncbi_dataset/data/dataset_catalog.json')
    catalog = dataset_catalog_pb2.Catalog()
    return json_format.Parse(raw_json, catalog)
def get_catalog_files(catalog: 'dataset_catalog_pb2.Catalog',
                      desired_filetype: 'dataset_catalog_pb2.File.FileType',
                      assm_acc: str = None):
    """Map assembly accession -> in-zip paths of files of the desired type.

    Args:
        catalog: parsed dataset catalog protobuf.
        desired_filetype: file-type enum value to select.
        assm_acc: when given, restrict the result to this one accession.

    Returns:
        defaultdict(list) keyed by assembly accession; values are paths
        relative to the package root ('ncbi_dataset/data/...').
    """
    files = defaultdict(list)
    for assm in catalog.assemblies:
        acc = assm.accession
        if assm_acc and assm_acc != acc:
            continue
        for f in assm.files:
            # Test the type first so the path join only happens for matches.
            if f.file_type == desired_filetype:
                files[acc].append(os.path.join('ncbi_dataset', 'data', f.file_path))
    return files
def get_zip_file_for_acc(acc, path):
    """Return the path to `<path>/<acc>.zip` when it exists, else None."""
    candidate = os.path.join(path, f'{acc}.zip')
    return candidate if os.path.isfile(candidate) else None
@dataclass
class Gene:
    """One gene (or pseudogene) hit from a GFF3 annotation."""
    id: str
    feat_type: str
    name: str
    chrom: str
    strand: str
    range_start: int
    range_stop: int
    protein_accession: str = ""

    def get_fields(self):
        # Column order matches the TSV rows written by find_genes_by_loc().
        return [self.feat_type, self.name, self.range_start, self.range_stop, self.protein_accession]

    def name_val(self):
        # Prefer the protein accession when one was found (protein-coding genes).
        return self.protein_accession if self.protein_accession else self.name


def find_genes_by_loc(gff3_db, csvout, assm_acc, seq_acc, start, stop, extra_fields):
    """Report genes/pseudogenes overlapping [start, stop] on sequence `seq_acc`.

    Writes one TSV row per feature to `csvout` and returns the corresponding
    list of Gene objects.

    Args:
        gff3_db: gffutils database exposing region() and children().
        csvout: csv writer receiving one row per found gene.
        assm_acc/seq_acc/start/stop: location being searched (echoed in output).
        extra_fields: caller-supplied columns copied into each output row.
    """
    found_genes = []
    feat_types = ('gene', 'pseudogene')
    for gene in gff3_db.region(seqid=seq_acc, start=start, end=stop,
                               featuretype=feat_types, completely_within=False):
        # Fall back to the feature id when the record has no Name attribute;
        # the original `.get('Name', None)[0]` raised TypeError on None.
        gene_name = gene.attributes.get('Name', [gene.id])[0]
        prot_acc = ""
        # `.get(...)` instead of `[...]`: records without gene_biotype are
        # treated as non-coding rather than raising KeyError.
        if gene.attributes.get('gene_biotype', [''])[0] == 'protein_coding':
            cds = list(gff3_db.children(gene, featuretype='CDS'))
            # A coding gene may still lack CDS children in partial annotations.
            if cds:
                prot_acc = cds[0].attributes.get('protein_id', [''])[0]
        geneobj = Gene(
            gene.id,
            gene.featuretype,
            gene_name,
            gene.chrom,
            gene.strand,
            gene.start,
            gene.stop,
            prot_acc,
        )
        csvout.writerow([assm_acc, seq_acc, start, stop, *extra_fields, *geneobj.get_fields()])
        found_genes.append(geneobj)
    return found_genes
class FindGenesByLoc:
    """CLI tool: read genomic locations from stdin (TSV) and report genes there.

    Input rows: assembly_accession, sequence_accession, start, stop, [extra...].
    Output: TSV rows on stdout, one per gene overlapping the location.
    Assemblies are looked up as `<packages-dir>/<accession>.zip` packages.
    """

    default_packages_dir = os.path.join('var', 'data', 'packages')

    def __init__(self):
        parser = argparse.ArgumentParser()
        parser.add_argument('--packages-dir', type=str, default=self.default_packages_dir,
                            help=f'root of input data directory [{self.default_packages_dir}]')
        parser.add_argument('--locs', type=str, help='file containing genomic locations')
        self.args = parser.parse_args()
        # Result rows go to stdout; diagnostics must therefore use stderr.
        self.writer = csv.writer(sys.stdout, dialect='excel-tab')

    def read_data(self):
        """Yield one tab-separated location row at a time from stdin."""
        for row in csv.reader(iter(sys.stdin.readline, ''), dialect='excel-tab'):
            yield row

    def run(self):
        """Process every stdin row; columns past the fourth are passed through."""
        for assm_acc, seq_acc, start, stop, *extra in self.read_data():
            self.find_all_for_location(assm_acc, seq_acc, start, stop, extra)

    def process_loc_for_gff(self, zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields):
        """Load one GFF3 file from the zip into an in-memory gffutils DB and query it."""
        with tempfile.NamedTemporaryFile() as tmpfile:
            tmpfile.write(zin.read(gff_fname))
            # Flush before gffutils opens the file by name, otherwise the tail
            # of a large GFF3 can still be sitting in the write buffer.
            tmpfile.flush()
            db = gffutils.create_db(
                tmpfile.name,
                dbfn=':memory:',
                force=True,
                keep_order=True,
                merge_strategy='merge',
                sort_attribute_values=True,
            )
            find_genes_by_loc(db, self.writer, assm_acc, seq_acc, start, stop, extra_fields)

    def find_all_for_location(self, assm_acc, seq_acc, start, stop, extra_fields):
        """Search every GFF3 file in the assembly's package for the location."""
        zip_file = get_zip_file_for_acc(assm_acc, self.args.packages_dir)
        if zip_file is None:
            # Previously ZipFile(None) raised an unhandled TypeError; report and skip.
            print(f'no package found for {assm_acc}', file=sys.stderr)
            return
        try:
            with zipfile.ZipFile(zip_file, 'r') as zin:
                catalog = retrieve_data_catalog(zin)
                gff_files_by_acc = get_catalog_files(catalog, dataset_catalog_pb2.File.FileType.GFF3)
                # Renamed loop variables: the original shadowed both the
                # `assm_acc` parameter and the dict being iterated (`gff_files`).
                for acc, gff_names in gff_files_by_acc.items():
                    # NOTE(review): report is fetched but never used — presumably
                    # intended for richer output; confirm before removing.
                    report = retrieve_assembly_report(zin, catalog, acc)
                    for gff_fname in gff_names:
                        self.process_loc_for_gff(zin, gff_fname, acc, seq_acc, start, stop, extra_fields)
        except zipfile.BadZipFile:
            # Diagnostics on stderr so they don't pollute the TSV on stdout.
            print(f'{zip_file} is not a zip file', file=sys.stderr)


if __name__ == '__main__':
    FindGenesByLoc().run()
assm.accession if", "self.process_loc_for_gff(zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields) except zipfile.BadZipFile: print(f'{zip_file}", "strand: str range_start: int range_stop: int protein_accession: str = \"\"", "desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str): report_files = get_catalog_files(catalog, desired_filetype, assm_acc) filepaths", "for assm_acc, gff_files in gff_files.items(): report = retrieve_assembly_report(zin, catalog, assm_acc)", "start, stop, extra) def process_loc_for_gff(self, zin, gff_fname, assm_acc, seq_acc, start,", "dataset_catalog_pb2.File.FileType, assm_acc: str): report_files = get_catalog_files(catalog, desired_filetype, assm_acc) filepaths =", "class FindGenesByLoc: default_packages_dir = os.path.join('var', 'data', 'packages') def __init__(self): parser", "assm_acc: str = None): files = defaultdict(list) for assm in", "def get_catalog_files(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str = None): files", "gffutils from google.protobuf import json_format from ncbi.datasets.v1alpha1 import dataset_catalog_pb2 from", "zip_in.read('ncbi_dataset/data/dataset_catalog.json') return json_format.Parse(catalog_json, dataset_catalog_pb2.Catalog()) def get_catalog_files_for_assembly(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc:", "os.path.join('ncbi_dataset', 'data', f.file_path) if f.file_type == desired_filetype: files[acc].append(filepath) return files", "gene.strand, gene.start, gene.stop, prot_acc, ) csvout.writerow([assm_acc, seq_acc, start, stop, *extra_fields,", "return files def get_zip_file_for_acc(acc, path): fname = os.path.join(path, f'{acc}.zip') if", "dataclass, field from enum import Enum, unique, auto import os", "gene.start, gene.stop, prot_acc, ) csvout.writerow([assm_acc, seq_acc, start, stop, *extra_fields, *geneobj.get_fields()])", "assm in catalog.assemblies: acc = 
assm.accession if assm_acc and assm_acc", "== desired_filetype: files[acc].append(filepath) return files def get_zip_file_for_acc(acc, path): fname =", "assm_acc, seq_acc, start, stop, extra_fields): with tempfile.NamedTemporaryFile() as tmpfile: tmpfile.write(zin.read(gff_fname))", "import dataclass, field from enum import Enum, unique, auto import", "if os.path.isfile(fname): return fname return None @dataclass class Gene: id:", "if assm_acc and assm_acc != acc: continue for f in", "str range_start: int range_stop: int protein_accession: str = \"\" def", "gene.featuretype, gene_name, gene.chrom, gene.strand, gene.start, gene.stop, prot_acc, ) csvout.writerow([assm_acc, seq_acc,", "assm_acc, paths in report_files.items(): filepaths.extend(paths) return filepaths def get_catalog_files(catalog: dataset_catalog_pb2.Catalog,", "= get_catalog_files_for_assembly(catalog, dataset_catalog_pb2.File.FileType.DATA_REPORT, assm_acc) for path in report_files: yaml =", "= zip_in.read('ncbi_dataset/data/dataset_catalog.json') return json_format.Parse(catalog_json, dataset_catalog_pb2.Catalog()) def get_catalog_files_for_assembly(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType,", "[] feat_types = ('gene', 'pseudogene') for gene in gff3_db.region(seqid=seq_acc, start=start,", "seq_acc, start, stop, extra_fields): with tempfile.NamedTemporaryFile() as tmpfile: tmpfile.write(zin.read(gff_fname)) db", "feat_types = ('gene', 'pseudogene') for gene in gff3_db.region(seqid=seq_acc, start=start, end=stop,", "get_catalog_files_for_assembly(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str): report_files = get_catalog_files(catalog, desired_filetype,", "in report_files: yaml = zip_in.read(path) rpt_rdr = DatasetsReportReader() return rpt_rdr.assembly_report(yaml)", "dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str): report_files = 
get_catalog_files(catalog, desired_filetype, assm_acc)", "= os.path.join(path, f'{acc}.zip') if os.path.isfile(fname): return fname return None @dataclass", "def __init__(self): parser = argparse.ArgumentParser() parser.add_argument('--packages-dir', type=str, default=self.default_packages_dir, help=f'root of", "find_genes_by_loc(db, self.writer, assm_acc, seq_acc, start, stop, extra_fields) def find_all_for_location(self, assm_acc,", "seq_acc, start, stop, extra_fields) def find_all_for_location(self, assm_acc, seq_acc, start, stop,", "end=stop, featuretype=feat_types, completely_within=False): gene_name = gene.attributes.get('Name', None)[0] prot_acc = \"\"", "= assm.accession if assm_acc and assm_acc != acc: continue for", "assembly_pb2 from ncbi.datasets.reports.report_reader import DatasetsReportReader def retrieve_assembly_report(zip_in, catalog, assm_acc: str)", "str) -> assembly_pb2.AssemblyDataReport: report_files = get_catalog_files_for_assembly(catalog, dataset_catalog_pb2.File.FileType.DATA_REPORT, assm_acc) for path", "catalog.assemblies: acc = assm.accession if assm_acc and assm_acc != acc:", "str chrom: str strand: str range_start: int range_stop: int protein_accession:", "gff_files: self.process_loc_for_gff(zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields) except zipfile.BadZipFile:" ]
[ "Meta: unique_together = ('trip', 'stop',) class Capacity(models.Model): trip = models.ForeignKey(Trip)", "stop_time = models.ForeignKey(StopTime) service_date = models.ForeignKey(ServiceDate) capacity1st = models.IntegerField('capacity1st', null=True,", "Route, Service, ServiceDate, Shape, ShapePoint, Stop, StopTime, Trip, Agency) class", "capacity2nd = models.IntegerField('capacity2nd', null=True, blank=True) class Meta: unique_together = ('trip',", "blank=True) capacity2nd = models.IntegerField('capacity2nd', null=True, blank=True) class Meta: unique_together =", "= ('trip', 'stop',) class Capacity(models.Model): trip = models.ForeignKey(Trip) stop_time =", "import models from multigtfs.models import ( Block, Fare, FareRule, Feed,", "models.IntegerField('capacity1st', null=True, blank=True) capacity2nd = models.IntegerField('capacity2nd', null=True, blank=True) class Meta:", "FareRule, Feed, Frequency, Route, Service, ServiceDate, Shape, ShapePoint, Stop, StopTime,", "Service, ServiceDate, Shape, ShapePoint, Stop, StopTime, Trip, Agency) class Path(models.Model):", "Fare, FareRule, Feed, Frequency, Route, Service, ServiceDate, Shape, ShapePoint, Stop,", "Feed, Frequency, Route, Service, ServiceDate, Shape, ShapePoint, Stop, StopTime, Trip,", "Agency) class Path(models.Model): trip = models.ForeignKey(Trip) stop = models.ForeignKey(Stop) path", "unique_together = ('trip', 'stop',) class Capacity(models.Model): trip = models.ForeignKey(Trip) stop_time", "'stop',) class Capacity(models.Model): trip = models.ForeignKey(Trip) stop_time = models.ForeignKey(StopTime) service_date", "trip = models.ForeignKey(Trip) stop_time = models.ForeignKey(StopTime) service_date = models.ForeignKey(ServiceDate) capacity1st", "models from multigtfs.models import ( Block, Fare, FareRule, Feed, Frequency,", "( Block, Fare, FareRule, Feed, Frequency, Route, Service, ServiceDate, Shape,", "Block, Fare, FareRule, Feed, Frequency, Route, Service, ServiceDate, Shape, ShapePoint,", 
"multigtfs.models import ( Block, Fare, FareRule, Feed, Frequency, Route, Service,", "null=True, blank=True) class Meta: unique_together = ('trip', 'stop',) class Capacity(models.Model):", "= models.ForeignKey(StopTime) service_date = models.ForeignKey(ServiceDate) capacity1st = models.IntegerField('capacity1st', null=True, blank=True)", "import ( Block, Fare, FareRule, Feed, Frequency, Route, Service, ServiceDate,", "models.CharField(max_length=1024, null=True, blank=True) class Meta: unique_together = ('trip', 'stop',) class", "ServiceDate, Shape, ShapePoint, Stop, StopTime, Trip, Agency) class Path(models.Model): trip", "null=True, blank=True) capacity2nd = models.IntegerField('capacity2nd', null=True, blank=True) class Meta: unique_together", "django.db import models from multigtfs.models import ( Block, Fare, FareRule,", "= models.ForeignKey(Stop) path = models.CharField(max_length=1024, null=True, blank=True) class Meta: unique_together", "= models.CharField(max_length=1024, null=True, blank=True) class Meta: unique_together = ('trip', 'stop',)", "from django.db import models from multigtfs.models import ( Block, Fare,", "= models.ForeignKey(Trip) stop = models.ForeignKey(Stop) path = models.CharField(max_length=1024, null=True, blank=True)", "blank=True) class Meta: unique_together = ('trip', 'stop',) class Capacity(models.Model): trip", "StopTime, Trip, Agency) class Path(models.Model): trip = models.ForeignKey(Trip) stop =", "from multigtfs.models import ( Block, Fare, FareRule, Feed, Frequency, Route,", "Trip, Agency) class Path(models.Model): trip = models.ForeignKey(Trip) stop = models.ForeignKey(Stop)", "Path(models.Model): trip = models.ForeignKey(Trip) stop = models.ForeignKey(Stop) path = models.CharField(max_length=1024,", "('trip', 'stop',) class Capacity(models.Model): trip = models.ForeignKey(Trip) stop_time = models.ForeignKey(StopTime)", "models.IntegerField('capacity2nd', null=True, blank=True) class Meta: unique_together = ('trip', 
'stop_time', 'service_date')", "class Meta: unique_together = ('trip', 'stop',) class Capacity(models.Model): trip =", "= models.ForeignKey(ServiceDate) capacity1st = models.IntegerField('capacity1st', null=True, blank=True) capacity2nd = models.IntegerField('capacity2nd',", "= models.IntegerField('capacity2nd', null=True, blank=True) class Meta: unique_together = ('trip', 'stop_time',", "Frequency, Route, Service, ServiceDate, Shape, ShapePoint, Stop, StopTime, Trip, Agency)", "Stop, StopTime, Trip, Agency) class Path(models.Model): trip = models.ForeignKey(Trip) stop", "models.ForeignKey(StopTime) service_date = models.ForeignKey(ServiceDate) capacity1st = models.IntegerField('capacity1st', null=True, blank=True) capacity2nd", "class Path(models.Model): trip = models.ForeignKey(Trip) stop = models.ForeignKey(Stop) path =", "<filename>web/backend/backend_django/apps/capacity/models.py from django.db import models from multigtfs.models import ( Block,", "models.ForeignKey(Stop) path = models.CharField(max_length=1024, null=True, blank=True) class Meta: unique_together =", "= models.ForeignKey(Trip) stop_time = models.ForeignKey(StopTime) service_date = models.ForeignKey(ServiceDate) capacity1st =", "capacity1st = models.IntegerField('capacity1st', null=True, blank=True) capacity2nd = models.IntegerField('capacity2nd', null=True, blank=True)", "models.ForeignKey(Trip) stop = models.ForeignKey(Stop) path = models.CharField(max_length=1024, null=True, blank=True) class", "Shape, ShapePoint, Stop, StopTime, Trip, Agency) class Path(models.Model): trip =", "path = models.CharField(max_length=1024, null=True, blank=True) class Meta: unique_together = ('trip',", "class Capacity(models.Model): trip = models.ForeignKey(Trip) stop_time = models.ForeignKey(StopTime) service_date =", "= models.IntegerField('capacity1st', null=True, blank=True) capacity2nd = models.IntegerField('capacity2nd', null=True, blank=True) class", "service_date = models.ForeignKey(ServiceDate) 
capacity1st = models.IntegerField('capacity1st', null=True, blank=True) capacity2nd =", "ShapePoint, Stop, StopTime, Trip, Agency) class Path(models.Model): trip = models.ForeignKey(Trip)", "models.ForeignKey(Trip) stop_time = models.ForeignKey(StopTime) service_date = models.ForeignKey(ServiceDate) capacity1st = models.IntegerField('capacity1st',", "Capacity(models.Model): trip = models.ForeignKey(Trip) stop_time = models.ForeignKey(StopTime) service_date = models.ForeignKey(ServiceDate)", "trip = models.ForeignKey(Trip) stop = models.ForeignKey(Stop) path = models.CharField(max_length=1024, null=True,", "models.ForeignKey(ServiceDate) capacity1st = models.IntegerField('capacity1st', null=True, blank=True) capacity2nd = models.IntegerField('capacity2nd', null=True,", "stop = models.ForeignKey(Stop) path = models.CharField(max_length=1024, null=True, blank=True) class Meta:" ]
[ "* except ImportError: pass PORT = 9028 SERVICE_NAME = 'interface'", "from public_config import * except ImportError: pass PORT = 9028", "public_config import * except ImportError: pass PORT = 9028 SERVICE_NAME", "try: from public_config import * except ImportError: pass PORT =", "import * except ImportError: pass PORT = 9028 SERVICE_NAME =" ]
[ ".models import Pokemon from .serializers import PokemonSerializer from .filters import", "queryset = Pokemon.objects.all() serializer_class = PokemonSerializer @method_decorator(cache_page(7200)) def get(self, request,", "from .models import Pokemon from .serializers import PokemonSerializer from .filters", "import PokemonFilterSet # Create your views here. class PokemonListAPIView(ListAPIView): queryset", ") from .models import Pokemon from .serializers import PokemonSerializer from", "PokemonCreateAPIView(CreateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonUpdateAPIView(UpdateAPIView): queryset", "serializer_class = PokemonSerializer class PokemonDeleteAPIView(DestroyAPIView): queryset = Pokemon.objects.all() serializer_class =", "PokemonDetailAPIView(RetrieveAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer @method_decorator(cache_page(7200)) def get(self,", "PokemonUpdateAPIView(UpdateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonDeleteAPIView(DestroyAPIView): queryset", "= PokemonSerializer class PokemonDeleteAPIView(DestroyAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer", "= Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonUpdateAPIView(UpdateAPIView): queryset = Pokemon.objects.all()", "serializer_class = PokemonSerializer class PokemonUpdateAPIView(UpdateAPIView): queryset = Pokemon.objects.all() serializer_class =", "from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from rest_framework.generics", "= Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonDeleteAPIView(DestroyAPIView): queryset = Pokemon.objects.all()", "*args, **kwargs): return super().get(request, *args, **kwargs) class PokemonDetailAPIView(RetrieveAPIView): queryset =", "= PokemonFilterSet @method_decorator(cache_page(7200)) def get(self, 
request, *args, **kwargs): return super().get(request,", "= PokemonSerializer @method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs): return super().get(request,", "queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonUpdateAPIView(UpdateAPIView): queryset =", "**kwargs) class PokemonDetailAPIView(RetrieveAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer @method_decorator(cache_page(7200))", "RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView ) from .models import Pokemon from", "<filename>api/api/pokemon/views.py from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from", "Pokemon.objects.all() serializer_class = PokemonSerializer @method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs):", "cache_page from rest_framework.generics import ( ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView", "your views here. class PokemonListAPIView(ListAPIView): queryset = Pokemon.objects.all() serializer_class =", "queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonDeleteAPIView(DestroyAPIView): queryset =", "PokemonSerializer from .filters import PokemonFilterSet # Create your views here.", "PokemonSerializer @method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs): return super().get(request, *args,", "Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonDeleteAPIView(DestroyAPIView): queryset = Pokemon.objects.all() serializer_class", "PokemonSerializer class PokemonUpdateAPIView(UpdateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class", ".filters import PokemonFilterSet # Create your views here. class PokemonListAPIView(ListAPIView):", "views here. 
class PokemonListAPIView(ListAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer", "from .serializers import PokemonSerializer from .filters import PokemonFilterSet # Create", "from rest_framework.generics import ( ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView )", "from django.views.decorators.cache import cache_page from rest_framework.generics import ( ListAPIView, RetrieveAPIView,", "from .filters import PokemonFilterSet # Create your views here. class", "rest_framework.generics import ( ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView ) from", "*args, **kwargs) class PokemonDetailAPIView(RetrieveAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer", "class PokemonCreateAPIView(CreateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonUpdateAPIView(UpdateAPIView):", "here. class PokemonListAPIView(ListAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer filterset_class", "Pokemon from .serializers import PokemonSerializer from .filters import PokemonFilterSet #", "PokemonSerializer filterset_class = PokemonFilterSet @method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs):", "UpdateAPIView, DestroyAPIView ) from .models import Pokemon from .serializers import", "super().get(request, *args, **kwargs) class PokemonDetailAPIView(RetrieveAPIView): queryset = Pokemon.objects.all() serializer_class =", "import method_decorator from django.views.decorators.cache import cache_page from rest_framework.generics import (", "def get(self, request, *args, **kwargs): return super().get(request, *args, **kwargs) class", "( ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView ) from .models import", "serializer_class = PokemonSerializer filterset_class = PokemonFilterSet @method_decorator(cache_page(7200)) def get(self, request,", "= PokemonSerializer class 
PokemonUpdateAPIView(UpdateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer", "django.views.decorators.cache import cache_page from rest_framework.generics import ( ListAPIView, RetrieveAPIView, CreateAPIView,", "class PokemonListAPIView(ListAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer filterset_class =", "Create your views here. class PokemonListAPIView(ListAPIView): queryset = Pokemon.objects.all() serializer_class", "Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonUpdateAPIView(UpdateAPIView): queryset = Pokemon.objects.all() serializer_class", "get(self, request, *args, **kwargs): return super().get(request, *args, **kwargs) class PokemonDetailAPIView(RetrieveAPIView):", "return super().get(request, *args, **kwargs) class PokemonDetailAPIView(RetrieveAPIView): queryset = Pokemon.objects.all() serializer_class", "import ( ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView ) from .models", "PokemonFilterSet # Create your views here. 
class PokemonListAPIView(ListAPIView): queryset =", "DestroyAPIView ) from .models import Pokemon from .serializers import PokemonSerializer", "Pokemon.objects.all() serializer_class = PokemonSerializer filterset_class = PokemonFilterSet @method_decorator(cache_page(7200)) def get(self,", "django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from rest_framework.generics import", "ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView, DestroyAPIView ) from .models import Pokemon", "import cache_page from rest_framework.generics import ( ListAPIView, RetrieveAPIView, CreateAPIView, UpdateAPIView,", "method_decorator from django.views.decorators.cache import cache_page from rest_framework.generics import ( ListAPIView,", "= PokemonSerializer filterset_class = PokemonFilterSet @method_decorator(cache_page(7200)) def get(self, request, *args,", "PokemonFilterSet @method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs): return super().get(request, *args,", "queryset = Pokemon.objects.all() serializer_class = PokemonSerializer filterset_class = PokemonFilterSet @method_decorator(cache_page(7200))", "class PokemonUpdateAPIView(UpdateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class PokemonDeleteAPIView(DestroyAPIView):", "**kwargs): return super().get(request, *args, **kwargs) class PokemonCreateAPIView(CreateAPIView): queryset = Pokemon.objects.all()", "**kwargs): return super().get(request, *args, **kwargs) class PokemonDetailAPIView(RetrieveAPIView): queryset = Pokemon.objects.all()", "*args, **kwargs): return super().get(request, *args, **kwargs) class PokemonCreateAPIView(CreateAPIView): queryset =", "return super().get(request, *args, **kwargs) class PokemonCreateAPIView(CreateAPIView): queryset = Pokemon.objects.all() serializer_class", "PokemonListAPIView(ListAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer filterset_class = 
PokemonFilterSet", "filterset_class = PokemonFilterSet @method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs): return", "= Pokemon.objects.all() serializer_class = PokemonSerializer @method_decorator(cache_page(7200)) def get(self, request, *args,", "get(self, request, *args, **kwargs): return super().get(request, *args, **kwargs) class PokemonCreateAPIView(CreateAPIView):", "import PokemonSerializer from .filters import PokemonFilterSet # Create your views", "import Pokemon from .serializers import PokemonSerializer from .filters import PokemonFilterSet", "request, *args, **kwargs): return super().get(request, *args, **kwargs) class PokemonDetailAPIView(RetrieveAPIView): queryset", ".serializers import PokemonSerializer from .filters import PokemonFilterSet # Create your", "class PokemonDetailAPIView(RetrieveAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer @method_decorator(cache_page(7200)) def", "request, *args, **kwargs): return super().get(request, *args, **kwargs) class PokemonCreateAPIView(CreateAPIView): queryset", "serializer_class = PokemonSerializer @method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs): return", "CreateAPIView, UpdateAPIView, DestroyAPIView ) from .models import Pokemon from .serializers", "@method_decorator(cache_page(7200)) def get(self, request, *args, **kwargs): return super().get(request, *args, **kwargs)", "# Create your views here. 
class PokemonListAPIView(ListAPIView): queryset = Pokemon.objects.all()", "*args, **kwargs) class PokemonCreateAPIView(CreateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer", "**kwargs) class PokemonCreateAPIView(CreateAPIView): queryset = Pokemon.objects.all() serializer_class = PokemonSerializer class", "super().get(request, *args, **kwargs) class PokemonCreateAPIView(CreateAPIView): queryset = Pokemon.objects.all() serializer_class =", "= Pokemon.objects.all() serializer_class = PokemonSerializer filterset_class = PokemonFilterSet @method_decorator(cache_page(7200)) def" ]
[ "create address with username as param tmp = self.get_parentcfg( cfg,", "ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "my_group = self.get_parentcfg(cfg, cfgpath_abs) my_group = '/'.join(my_group['role_abspath']) for (k,ul) in", "] super(ServerInstancesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "**kwargs ) @property def config_path(self): return type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def", "(like guest or developer) and ## the values should be", "cfg, my_subcfg, cfgpath_abs): if not my_subcfg: return my_subcfg ## if", "self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True) ) super(SrvUsrNormalizer, self).__init__( pluginref, *args, **kwargs )", "(SrvSubRolesNormalizer, True), ] super(SrvRolesBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) def", "else: ## subrole parent = get_subdict(cfg, cfgpath_abs[:-2]) parent = parent['role_abspath']", "recursive structures, the sub normalizers can only ## be instantiated", "+= [ SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer, self).__init__( pluginref, *args, **kwargs )", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return ['members']", "levels (like guest or developer) and ## the values should", "cfg): tmp = ['roles', 'subroles'] \\ + urole['path'].replace('/', '/subroles/').split('/') tmp", "__init__(self, pluginref, *args, **kwargs): super(SrvRolesMembersNormalizer, self).__init__( pluginref, *args, **kwargs )", "defval=cfgpath_abs[-1]) # build role hierarchy path and parent if cfgpath_abs[-1]", "cfgpath_abs, level=3 ).get('mail_template', None) if tmp: my_subcfg['email'] = tmp.format( my_subcfg['username'].replace('_',", "*args, **kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesMembersNormalizer(NormalizerBase):", "= [] my_group = 
self.get_parentcfg(cfg, cfgpath_abs) my_group = '/'.join(my_group['role_abspath']) for", "ServersNormalizer(pluginref), ] super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs) class ServersNormalizer(NormalizerBase): def __init__(self,", "**kwargs ) @property def config_path(self): return ['members'] def _handle_specifics_presub(self, cfg,", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer,", "+= [ SrvRolesMembersNormalizer(pluginref), ## note: for recursive structures, the sub", "ansible.module_utils.six import iteritems, string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed,", "['subroles'] def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "@property def name_key(self): return 'username' def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs):", ") subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServersNormalizer(pluginref), ]", "= [] else: ## subrole parent = get_subdict(cfg, cfgpath_abs[:-2]) parent", "of users exportcfg = [] my_group = self.get_parentcfg(cfg, cfgpath_abs) my_group", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return ['humans']", "*args, **kwargs): super(ServerBotsNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", ") return my_subcfg class SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self, pluginref, *args, **kwargs):", "string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey from", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRolesMembersNormalizer(pluginref), ## note:", "if so use this 
to # create address with username", "subnorms += [ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer, self).__init__( pluginref, *args,", "self.get_parentcfg( cfg, cfgpath_abs, level=3 ).get('mail_template', None) if tmp: my_subcfg['email'] =", "NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict", "= get_subdict(cfg, tmp, default_empty=True) setdefault_none(setdefault_none(tmp, 'members', {}), urole['level'], [] ).append(username)", "path and parent if cfgpath_abs[-1] == 'roles': ## top level", "config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "return [SUBDICT_METAKEY_ANY] class SrvRolesMembersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesMembersNormalizer,", "exportcfg.append({ 'gitlab_group': my_group, 'gitlab_user': u, 'access_level': k }) my_subcfg['_exportcfg'] =", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return ['servers']", "SrvRolesBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "subnorms += [ SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer, self).__init__( pluginref, *args, **kwargs", "@property def config_path(self): return ['humans'] class SrvUsrNormalizer(NormalizerBase): def __init__(self, pluginref,", "ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "keys are ## valid gitlab access levels (like guest or", "self).__init__(pluginref, *args, **kwargs) class ServersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "def config_path(self): return ['config'] @property def name_key(self): 
return 'username' def", "my_subcfg ## if it exists, members should be a dict", "def name_key(self): return 'username' def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): mail", "*args, **kwargs ) @property def config_path(self): return ['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer):", "if tmp: my_subcfg['email'] = tmp.format( my_subcfg['username'].replace('_', '-') ) return my_subcfg", "cfg, my_subcfg, cfgpath_abs): # do config subkey c = setdefault_none(my_subcfg,", "self).__init__( pluginref, *args, **kwargs ) def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs):", "import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none,", "default_empty=True) setdefault_none(setdefault_none(tmp, 'members', {}), urole['level'], [] ).append(username) class ConfigRootNormalizer(NormalizerBase): def", "*args, **kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args, default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs ) self._supports_check_mode", "SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self, pluginref, *args, **kwargs): super(SrvUsrCfgNormalizer, self).__init__( pluginref, *args,", "class SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesNormalizer, self).__init__( pluginref,", "[ SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "**kwargs ) @property def config_path(self): return ['servers'] class ServerInstancesNormalizer(NormalizerBase): def", "in ul: exportcfg.append({ 'gitlab_group': my_group, 'gitlab_user': u, 'access_level': k })", "members should be a dict where the keys are ##", "@property def config_path(self): return [SUBDICT_METAKEY_ANY] class 
SrvRolesMembersNormalizer(NormalizerBase): def __init__(self, pluginref,", "users exportcfg = [] my_group = self.get_parentcfg(cfg, cfgpath_abs) my_group =", "return ['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerHumansNormalizer,", "pluginref, *args, **kwargs): super(SrvRolesMembersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "import (absolute_import, division, print_function) __metaclass__ = type from ansible.errors import", "**kwargs ) class ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerBotsNormalizer,", "'/'.join(parent) return my_subcfg class SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs):", "specified for server, if so use this to # create", ") self.default_setters['name'] = DefaultSetterOtherKey('username') @property def config_path(self): return ['config'] @property", "[ SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True) ) super(SrvUsrNormalizer, self).__init__( pluginref,", "my_subcfg class ActionModule(ConfigNormalizerBaseMerger): def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args,", "usr_roles: for ur in usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg, cfgpath_abs, level=3)", "[ ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "import ansible_assert def user_role_to_cfg(username, urole, cfg): tmp = ['roles', 'subroles']", "[ SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "subkey c = setdefault_none(my_subcfg, 'config', defval={}) setdefault_none(c, 'name', defval=cfgpath_abs[-1]) #", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) 
subnorms += [ SrvUsrNormalizer(pluginref),", "pluginref, *args, **kwargs ) class ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args,", "get_subdict(cfg, tmp, default_empty=True) setdefault_none(setdefault_none(tmp, 'members', {}), urole['level'], [] ).append(username) class", "**kwargs): super(SrvUsrCfgNormalizer, self).__init__( pluginref, *args, mapkey_lvl=-2, **kwargs ) self.default_setters['name'] =", "to # create address with username as param tmp =", "urole['path'].replace('/', '/subroles/').split('/') tmp = get_subdict(cfg, tmp, default_empty=True) setdefault_none(setdefault_none(tmp, 'members', {}),", "def __init__(self, pluginref, *args, **kwargs): super(SrvRolesNormalizer, self).__init__( pluginref, *args, **kwargs", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvInstNormalizer(pluginref), ]", "ansible_assert def user_role_to_cfg(username, urole, cfg): tmp = ['roles', 'subroles'] \\", "def config_path(self): return ['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args,", "DefaultSetterOtherKey('username') @property def config_path(self): return ['config'] @property def name_key(self): return", "tmp = get_subdict(cfg, tmp, default_empty=True) setdefault_none(setdefault_none(tmp, 'members', {}), urole['level'], []", "config_path(self): return ['users'] class ServerUsrBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return ['users']", "'subroles'] \\ + urole['path'].replace('/', '/subroles/').split('/') tmp = get_subdict(cfg, tmp, default_empty=True)", "@property def config_path(self): return ['instances'] class SrvInstNormalizer(NormalizerBase): def __init__(self, pluginref,", ") def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): # do config subkey", "] super(ServerUsersNormalizer, self).__init__( pluginref, *args, **kwargs 
) @property def config_path(self):", ") @property def config_path(self): return [SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self, cfg, my_subcfg,", "def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): mail = my_subcfg.get('email', None) if", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerInstancesNormalizer(pluginref), ]", "= self.get_parentcfg( cfg, cfgpath_abs, level=3 ).get('mail_template', None) if tmp: my_subcfg['email']", "@property def config_path(self): return ['members'] def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs):", "+= [ SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True) ) super(SrvUsrNormalizer, self).__init__(", "class SrvRolesBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "pluginref, *args, **kwargs ) @property def config_path(self): return ['servers'] class", "[SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs): usr_roles = my_subcfg.get('roles', None)", "config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesMembersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "class ServerUsrBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "'-') ) return my_subcfg class ActionModule(ConfigNormalizerBaseMerger): def __init__(self, *args, **kwargs):", "ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert def user_role_to_cfg(username, urole, cfg): tmp = ['roles',", "c['parent'] = '/'.join(parent) return my_subcfg class SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref,", "] super(SrvSubRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "access levels (like guest or developer) and ## the values", 
"@property def config_path(self): return ['users'] class ServerUsrBaseNormalizer(NormalizerBase): def __init__(self, pluginref,", "def config_path(self): return ['humans'] class SrvUsrNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer, self).__init__( pluginref,", "is explicitly given, check if mail # template is specified", ") @property def config_path(self): return ['users'] class ServerUsrBaseNormalizer(NormalizerBase): def __init__(self,", "user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg, cfgpath_abs, level=3) ) return my_subcfg class SrvUsrCfgNormalizer(NormalizerNamed):", "exportcfg return my_subcfg class ServerUsersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "pluginref, *args, **kwargs ) @property def config_path(self): return ['bots'] class", "are ## valid gitlab access levels (like guest or developer)", "explicitly given, check if mail # template is specified for", "NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict from", "import iteritems, string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant,", ") @property def config_path(self): return ['servers'] class ServerInstancesNormalizer(NormalizerBase): def __init__(self,", "def config_path(self): return ['servers'] class ServerInstancesNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer, self).__init__(", "cfgpath_abs, level=3) ) return my_subcfg class 
SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self, pluginref,", "__init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms +=", "[ ServersNormalizer(pluginref), ] super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs) class ServersNormalizer(NormalizerBase): def", "'gitlab_group': my_group, 'gitlab_user': u, 'access_level': k }) my_subcfg['_exportcfg'] = exportcfg", "'username' def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): mail = my_subcfg.get('email', None)", "kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRolesMembersNormalizer(pluginref), ## note: for recursive", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRoleInstNormalizer(pluginref),", "return ['config'] @property def name_key(self): return 'username' def _handle_specifics_presub(self, cfg,", "<gh_stars>0 from __future__ import (absolute_import, division, print_function) __metaclass__ = type", "] super(ServersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "'pw_access', DefaultSetterConstant(True) ) super(SrvUsrNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", ") return my_subcfg class ActionModule(ConfigNormalizerBaseMerger): def __init__(self, *args, **kwargs): super(ActionModule,", "__future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.errors", "not mail: # if not mail address is explicitly given,", "**kwargs): super(ServerHumansNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "config_path(self): return ['instances'] class SrvInstNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "SrvRolesMembersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesMembersNormalizer, self).__init__( pluginref, *args,", "kwargs.setdefault('sub_normalizers', []) subnorms += [ 
SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True)", "pluginref, *args, **kwargs): super(SrvUsrCfgNormalizer, self).__init__( pluginref, *args, mapkey_lvl=-2, **kwargs )", "return type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRoleInstNormalizer,", "iteritems(my_subcfg): for u in ul: exportcfg.append({ 'gitlab_group': my_group, 'gitlab_user': u,", "kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer, self).__init__( pluginref,", "super(ServerHumansNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "## be instantiated if the corresponding key actually exists ##", "subnorms += [ ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer, self).__init__( pluginref, *args, **kwargs", "self).__init__(ConfigRootNormalizer(self), *args, default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs ) self._supports_check_mode = False self._supports_async", "__init__(self, pluginref, *args, **kwargs): super(ServerBotsNormalizer, self).__init__( pluginref, *args, **kwargs )", "[]) subnorms += [ SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer, self).__init__( pluginref, *args,", "'config', defval={}) setdefault_none(c, 'name', defval=cfgpath_abs[-1]) # build role hierarchy path", "+= [ SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer, self).__init__( pluginref, *args, **kwargs )", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref),", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer,", "where the keys are ## valid gitlab access levels (like", "mail = 
my_subcfg.get('email', None) if not mail: # if not", ") @property def config_path(self): return type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self,", "ServerUsersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "return my_subcfg class SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesNormalizer,", "username as param tmp = self.get_parentcfg( cfg, cfgpath_abs, level=3 ).get('mail_template',", "my_subcfg, cfgpath_abs): # do config subkey c = setdefault_none(my_subcfg, 'config',", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer, self).__init__(", "developer) and ## the values should be a list of", "pluginref, *args, **kwargs): super(ServerHumansNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer,", "self.default_setters['name'] = DefaultSetterOtherKey('username') @property def config_path(self): return ['config'] @property def", "config_path(self): return ['servers'] class ServerInstancesNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "@property def config_path(self): return [SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs):", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServersNormalizer(pluginref), ] super(ConfigRootNormalizer,", "*args, **kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self,", "tmp, default_empty=True) setdefault_none(setdefault_none(tmp, 'members', {}), urole['level'], [] ).append(username) class ConfigRootNormalizer(NormalizerBase):", "class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH = 
['subroles'] def __init__(self, pluginref, *args, **kwargs):", "pluginref, *args, **kwargs ) @property def config_path(self): return type(self).NORMER_CONFIG_PATH class", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ]", "class SrvRolesMembersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesMembersNormalizer, self).__init__( pluginref,", "'gitlab_user': u, 'access_level': k }) my_subcfg['_exportcfg'] = exportcfg return my_subcfg", "division, print_function) __metaclass__ = type from ansible.errors import AnsibleOptionsError from", "kwargs.setdefault('sub_normalizers', []) subnorms += [ ServersNormalizer(pluginref), ] super(ConfigRootNormalizer, self).__init__(pluginref, *args,", "super(SrvRolesMembersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "indefinite recursions of death (SrvSubRolesNormalizer, True), ] super(SrvRolesBaseNormalizer, self).__init__( pluginref,", "pluginref, *args, **kwargs ) @property def config_path(self): return ['users'] class", "self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80) ) subnorms = kwargs.setdefault('sub_normalizers', []) subnorms +=", "None) if tmp: my_subcfg['email'] = tmp.format( my_subcfg['username'].replace('_', '-') ) return", "my_subcfg.get('roles', None) if usr_roles: for ur in usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur,", "+= [ ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer, self).__init__( pluginref, *args, **kwargs )", "DefaultSetterConstant, DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils", "my_subcfg, cfgpath_abs): mail = my_subcfg.get('email', None) if not mail: #", "subrole parent = get_subdict(cfg, 
cfgpath_abs[:-2]) parent = parent['role_abspath'] my_subcfg['role_abspath'] =", ").append(username) class ConfigRootNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): self._add_defaultsetter(kwargs, 'random_pwlen',", "_handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs): usr_roles = my_subcfg.get('roles', None) if usr_roles:", "def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms", "level parent = [] else: ## subrole parent = get_subdict(cfg,", "parent + [c['name']] c['parent'] = '/'.join(parent) return my_subcfg class SrvRolesNormalizer(SrvRolesBaseNormalizer):", "from ansible.errors import AnsibleOptionsError from ansible.module_utils.six import iteritems, string_types from", "['users'] class ServerUsrBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms =", "[ SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) class", "== 'roles': ## top level parent = [] else: ##", "def __init__(self, pluginref, *args, **kwargs): super(SrvUsrCfgNormalizer, self).__init__( pluginref, *args, mapkey_lvl=-2,", "ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "[]) subnorms += [ ServersNormalizer(pluginref), ] super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs)", "[] my_group = self.get_parentcfg(cfg, cfgpath_abs) my_group = '/'.join(my_group['role_abspath']) for (k,ul)", "config_path(self): return ['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs):", "__init__(self, pluginref, *args, **kwargs): self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80) ) subnorms =", "[] ).append(username) class ConfigRootNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): 
self._add_defaultsetter(kwargs,", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerUsersNormalizer(pluginref),", "parent = [] else: ## subrole parent = get_subdict(cfg, cfgpath_abs[:-2])", "pluginref, *args, **kwargs ) def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): #", "get_subdict(cfg, cfgpath_abs[:-2]) parent = parent['role_abspath'] my_subcfg['role_abspath'] = parent + [c['name']]", "my_group = '/'.join(my_group['role_abspath']) for (k,ul) in iteritems(my_subcfg): for u in", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return type(self).NORMER_CONFIG_PATH", "def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): # do config subkey c", "['roles'] class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH = ['subroles'] def __init__(self, pluginref, *args,", "config_path(self): return ['members'] def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): if not", "ul: exportcfg.append({ 'gitlab_group': my_group, 'gitlab_user': u, 'access_level': k }) my_subcfg['_exportcfg']", "'random_pwlen', DefaultSetterConstant(80) ) subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [", "import AnsibleOptionsError from ansible.module_utils.six import iteritems, string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import", "if not my_subcfg: return my_subcfg ## if it exists, members", "if cfgpath_abs[-1] == 'roles': ## top level parent = []", "ansible.errors import AnsibleOptionsError from ansible.module_utils.six import iteritems, string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base", "import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert def user_role_to_cfg(username,", "+ [c['name']] c['parent'] = '/'.join(parent) return my_subcfg class 
SrvRolesNormalizer(SrvRolesBaseNormalizer): def", "def config_path(self): return ['members'] def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): if", "NORMER_CONFIG_PATH = ['subroles'] def __init__(self, pluginref, *args, **kwargs): subnorms =", "or developer) and ## the values should be a list", ").get('mail_template', None) if tmp: my_subcfg['email'] = tmp.format( my_subcfg['username'].replace('_', '-') )", "for u in ul: exportcfg.append({ 'gitlab_group': my_group, 'gitlab_user': u, 'access_level':", "__init__(self, pluginref, *args, **kwargs): super(ServerHumansNormalizer, self).__init__( pluginref, *args, **kwargs )", "it exists, members should be a dict where the keys", "only ## be instantiated if the corresponding key actually exists", "for (k,ul) in iteritems(my_subcfg): for u in ul: exportcfg.append({ 'gitlab_group':", "## valid gitlab access levels (like guest or developer) and", "ServersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "return ['instances'] class SrvInstNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms", "[ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer, self).__init__( pluginref, *args, **kwargs )", "if mail # template is specified for server, if so", "a list of users exportcfg = [] my_group = self.get_parentcfg(cfg,", "config_path(self): return [SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs): usr_roles =", "return my_subcfg class SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self, pluginref, *args, **kwargs): super(SrvUsrCfgNormalizer,", "= my_subcfg.get('roles', None) if usr_roles: for ur in usr_roles: user_role_to_cfg(my_subcfg['config']['username'],", "super(SrvInstNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "not my_subcfg: return my_subcfg ## if it exists, members 
should", "pluginref, *args, **kwargs): super(SrvRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "ur in usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg, cfgpath_abs, level=3) ) return", "def config_path(self): return ['roles'] class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH = ['subroles'] def", "setdefault_none(c, 'name', defval=cfgpath_abs[-1]) # build role hierarchy path and parent", "key actually exists ## to avoid indefinite recursions of death", "SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH = ['subroles'] def __init__(self, pluginref, *args, **kwargs): subnorms", "super(ServersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "*args, **kwargs ) @property def config_path(self): return ['servers'] class ServerInstancesNormalizer(NormalizerBase):", "exists ## to avoid indefinite recursions of death (SrvSubRolesNormalizer, True),", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvInstNormalizer(pluginref),", "cfgpath_abs[-1] == 'roles': ## top level parent = [] else:", "guest or developer) and ## the values should be a", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer,", "**kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesMembersNormalizer(NormalizerBase): def", "DefaultSetterConstant(80) ) subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServersNormalizer(pluginref),", "death (SrvSubRolesNormalizer, True), ] super(SrvRolesBaseNormalizer, self).__init__( pluginref, *args, **kwargs )", "avoid indefinite recursions of death (SrvSubRolesNormalizer, True), ] super(SrvRolesBaseNormalizer, self).__init__(", "SrvInstNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "cfg, 
cfgpath_abs, level=3 ).get('mail_template', None) if tmp: my_subcfg['email'] = tmp.format(", "= '/'.join(parent) return my_subcfg class SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args,", "should be a list of users exportcfg = [] my_group", "return ['roles'] class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH = ['subroles'] def __init__(self, pluginref,", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return ['bots']", "= type from ansible.errors import AnsibleOptionsError from ansible.module_utils.six import iteritems,", "[c['name']] c['parent'] = '/'.join(parent) return my_subcfg class SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self,", "note: for recursive structures, the sub normalizers can only ##", "def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesMembersNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "class SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self, pluginref, *args, **kwargs): super(SrvUsrCfgNormalizer, self).__init__( pluginref,", "subnorms += [ SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer, self).__init__( pluginref, *args, **kwargs", "my_subcfg['_exportcfg'] = exportcfg return my_subcfg class ServerUsersNormalizer(NormalizerBase): def __init__(self, pluginref,", ") @property def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase): def __init__(self,", "be instantiated if the corresponding key actually exists ## to", "def user_role_to_cfg(username, urole, cfg): tmp = ['roles', 'subroles'] \\ +", "be a dict where the keys are ## valid gitlab", "config_path(self): return ['humans'] class SrvUsrNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "SrvUsrNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "return ['humans'] class SrvUsrNormalizer(NormalizerBase): 
def __init__(self, pluginref, *args, **kwargs): subnorms", "the values should be a list of users exportcfg =", "my_subcfg, cfgpath_abs): usr_roles = my_subcfg.get('roles', None) if usr_roles: for ur", "class ActionModule(ConfigNormalizerBaseMerger): def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args, default_merge_vars=['gitlab_cfg_defaults'],", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer,", "in usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg, cfgpath_abs, level=3) ) return my_subcfg", "*args, **kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase):", "def _handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs): usr_roles = my_subcfg.get('roles', None) if", "cfgpath_abs): if not my_subcfg: return my_subcfg ## if it exists,", "*args, **kwargs ) def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): # do", "class ServersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "SUBDICT_METAKEY_ANY, get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert def user_role_to_cfg(username, urole, cfg):", "kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer, self).__init__( pluginref,", "address is explicitly given, check if mail # template is", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRoleInstNormalizer(pluginref), ]", "setdefault_none, SUBDICT_METAKEY_ANY, get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert def user_role_to_cfg(username, urole,", "**kwargs): super(SrvRoleInstNormalizer, self).__init__( pluginref, *args, **kwargs ) 
@property def config_path(self):", "*args, **kwargs): self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80) ) subnorms = kwargs.setdefault('sub_normalizers', [])", "recursions of death (SrvSubRolesNormalizer, True), ] super(SrvRolesBaseNormalizer, self).__init__( pluginref, *args,", "super(SrvSubRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args, default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs ) self._supports_check_mode = False", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRolesMembersNormalizer(pluginref),", "+= [ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer, self).__init__( pluginref, *args, **kwargs", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerBotsNormalizer(pluginref),", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrCfgNormalizer(pluginref), ]", "gitlab access levels (like guest or developer) and ## the", "super(ServerUsersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "*args, **kwargs ) @property def config_path(self): return ['roles'] class SrvSubRolesNormalizer(NormalizerBase):", "for recursive structures, the sub normalizers can only ## be", "cfgpath_abs) my_group = '/'.join(my_group['role_abspath']) for (k,ul) in iteritems(my_subcfg): for u", "usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg, cfgpath_abs, level=3) ) return my_subcfg class", "subnorms += [ SrvRolesMembersNormalizer(pluginref), ## note: for recursive structures, the", "super(SrvRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", 
"ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import", "class ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerBotsNormalizer, self).__init__( pluginref,", "address with username as param tmp = self.get_parentcfg( cfg, cfgpath_abs,", "DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils import", "def config_path(self): return [SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs): usr_roles", "pluginref, *args, **kwargs): super(SrvRoleInstNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs ) self._supports_check_mode = False self._supports_async = False", "with username as param tmp = self.get_parentcfg( cfg, cfgpath_abs, level=3", "return ['members'] def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): if not my_subcfg:", "for ur in usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg, cfgpath_abs, level=3) )", "return [SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self, cfg, my_subcfg, cfgpath_abs): usr_roles = my_subcfg.get('roles',", "@property def config_path(self): return ['config'] @property def name_key(self): return 'username'", "__init__(self, pluginref, *args, **kwargs): super(SrvRoleInstNormalizer, self).__init__( pluginref, *args, **kwargs )", ") class ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerBotsNormalizer, self).__init__(", ") @property def 
config_path(self): return ['instances'] class SrvInstNormalizer(NormalizerBase): def __init__(self,", "SrvRolesMembersNormalizer(pluginref), ## note: for recursive structures, the sub normalizers can", "get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert def user_role_to_cfg(username, urole, cfg): tmp", "return my_subcfg class ActionModule(ConfigNormalizerBaseMerger): def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self),", "for server, if so use this to # create address", "**kwargs ) def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): # do config", ") @property def config_path(self): return ['members'] def _handle_specifics_presub(self, cfg, my_subcfg,", ") @property def config_path(self): return ['humans'] class SrvUsrNormalizer(NormalizerBase): def __init__(self,", "return 'username' def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): mail = my_subcfg.get('email',", "exportcfg = [] my_group = self.get_parentcfg(cfg, cfgpath_abs) my_group = '/'.join(my_group['role_abspath'])", "my_subcfg class SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesNormalizer, self).__init__(", "def config_path(self): return ['instances'] class SrvInstNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "config_path(self): return ['config'] @property def name_key(self): return 'username' def _handle_specifics_presub(self,", "@property def config_path(self): return ['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref,", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs, 'pw_access',", "usr_roles = my_subcfg.get('roles', None) if usr_roles: for ur in usr_roles:", "from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, 
get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert", "def __init__(self, pluginref, *args, **kwargs): self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80) ) subnorms", "the corresponding key actually exists ## to avoid indefinite recursions", "**kwargs ) @property def config_path(self): return ['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer): def", "setdefault_none(setdefault_none(tmp, 'members', {}), urole['level'], [] ).append(username) class ConfigRootNormalizer(NormalizerBase): def __init__(self,", "my_subcfg class ServerUsersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms =", "not mail address is explicitly given, check if mail #", "ServerInstancesNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs,", "['instances'] class SrvInstNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms =", "__init__(self, pluginref, *args, **kwargs): super(SrvRolesNormalizer, self).__init__( pluginref, *args, **kwargs )", "**kwargs): self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80) ) subnorms = kwargs.setdefault('sub_normalizers', []) subnorms", "subnorms += [ ServersNormalizer(pluginref), ] super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs) class", "this to # create address with username as param tmp", "self).__init__( pluginref, *args, mapkey_lvl=-2, **kwargs ) self.default_setters['name'] = DefaultSetterOtherKey('username') @property", "self.get_parentcfg(cfg, cfgpath_abs, level=3) ) return my_subcfg class SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self,", "check if mail # template is specified for server, if", "k }) my_subcfg['_exportcfg'] = exportcfg return 
my_subcfg class ServerUsersNormalizer(NormalizerBase): def", "self._supports_check_mode = False self._supports_async = False @property def my_ansvar(self): return", "print_function) __metaclass__ = type from ansible.errors import AnsibleOptionsError from ansible.module_utils.six", "pluginref, *args, **kwargs ) @property def config_path(self): return ['humans'] class", "super(ServerInstancesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "cfgpath_abs): mail = my_subcfg.get('email', None) if not mail: # if", "] super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs) class ServersNormalizer(NormalizerBase): def __init__(self, pluginref,", "## note: for recursive structures, the sub normalizers can only", "type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRoleInstNormalizer, self).__init__(", "tmp: my_subcfg['email'] = tmp.format( my_subcfg['username'].replace('_', '-') ) return my_subcfg class", "template is specified for server, if so use this to", "True), ] super(SrvRolesBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) def _handle_specifics_presub(self,", "self.get_parentcfg(cfg, cfgpath_abs) my_group = '/'.join(my_group['role_abspath']) for (k,ul) in iteritems(my_subcfg): for", "level=3) ) return my_subcfg class SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self, pluginref, *args,", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return ['instances']", "] self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True) ) super(SrvUsrNormalizer, self).__init__( pluginref, *args, **kwargs", "**kwargs ) self._supports_check_mode = False self._supports_async = False @property def", "**kwargs) class ServersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms =", "**kwargs): super(SrvRolesMembersNormalizer, self).__init__( 
pluginref, *args, **kwargs ) @property def config_path(self):", "config_path(self): return ['roles'] class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH = ['subroles'] def __init__(self,", "'/subroles/').split('/') tmp = get_subdict(cfg, tmp, default_empty=True) setdefault_none(setdefault_none(tmp, 'members', {}), urole['level'],", "of death (SrvSubRolesNormalizer, True), ] super(SrvRolesBaseNormalizer, self).__init__( pluginref, *args, **kwargs", "class ServerInstancesNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "iteritems, string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey", ") self._supports_check_mode = False self._supports_async = False @property def my_ansvar(self):", "valid gitlab access levels (like guest or developer) and ##", "dict where the keys are ## valid gitlab access levels", "urole['level'], [] ).append(username) class ConfigRootNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs):", "[]) subnorms += [ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer, self).__init__( pluginref,", "'members', {}), urole['level'], [] ).append(username) class ConfigRootNormalizer(NormalizerBase): def __init__(self, pluginref,", "class ServerUsersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "+= [ ServersNormalizer(pluginref), ] super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs) class ServersNormalizer(NormalizerBase):", "should be a dict where the keys are ## valid", "super(SrvUsrNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "def __init__(self, pluginref, *args, **kwargs): 
super(SrvRolesMembersNormalizer, self).__init__( pluginref, *args, **kwargs", "class ConfigRootNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80)", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer, self).__init__(", "@property def config_path(self): return type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref,", "SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) class ServerBotsNormalizer(ServerUsrBaseNormalizer):", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer,", "my_group, 'gitlab_user': u, 'access_level': k }) my_subcfg['_exportcfg'] = exportcfg return", "**kwargs ) @property def config_path(self): return ['users'] class ServerUsrBaseNormalizer(NormalizerBase): def", "be a list of users exportcfg = [] my_group =", "subnorms += [ SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True) ) super(SrvUsrNormalizer,", "## to avoid indefinite recursions of death (SrvSubRolesNormalizer, True), ]", "return [SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms", "= ['subroles'] def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "= my_subcfg.get('email', None) if not mail: # if not mail", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRolesMembersNormalizer(pluginref), ## note: for", "config_path(self): return type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs):", "[]) subnorms += [ SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer, 
self).__init__( pluginref, *args,", "def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "pluginref, *args, **kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] def", "\\ + urole['path'].replace('/', '/subroles/').split('/') tmp = get_subdict(cfg, tmp, default_empty=True) setdefault_none(setdefault_none(tmp,", "*args, **kwargs ) @property def config_path(self): return ['members'] def _handle_specifics_presub(self,", "*args, **kwargs) class ServersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms", "+= [ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer, self).__init__( pluginref, *args, **kwargs", "['humans'] class SrvUsrNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms =", "top level parent = [] else: ## subrole parent =", "pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [", "type from ansible.errors import AnsibleOptionsError from ansible.module_utils.six import iteritems, string_types", "] super(SrvInstNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "class ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerHumansNormalizer, self).__init__( pluginref,", "as param tmp = self.get_parentcfg( cfg, cfgpath_abs, level=3 ).get('mail_template', None)", "*args, **kwargs ) @property def config_path(self): return ['instances'] class SrvInstNormalizer(NormalizerBase):", "] super(ServerUsrBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) class ServerBotsNormalizer(ServerUsrBaseNormalizer): def", "from __future__ import (absolute_import, division, print_function) __metaclass__ = type from", "do config subkey c = setdefault_none(my_subcfg, 'config', defval={}) setdefault_none(c, 'name',", 
"_handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): if not my_subcfg: return my_subcfg ##", "def config_path(self): return type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args,", "exists, members should be a dict where the keys are", "'name', defval=cfgpath_abs[-1]) # build role hierarchy path and parent if", "pluginref, *args, **kwargs ) @property def config_path(self): return ['roles'] class", "if not mail: # if not mail address is explicitly", "and ## the values should be a list of users", "## the values should be a list of users exportcfg", "[ SrvRolesMembersNormalizer(pluginref), ## note: for recursive structures, the sub normalizers", "if usr_roles: for ur in usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg, cfgpath_abs,", "*args, **kwargs): super(ServerHumansNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "parent = get_subdict(cfg, cfgpath_abs[:-2]) parent = parent['role_abspath'] my_subcfg['role_abspath'] = parent", "[] else: ## subrole parent = get_subdict(cfg, cfgpath_abs[:-2]) parent =", "and parent if cfgpath_abs[-1] == 'roles': ## top level parent", "= exportcfg return my_subcfg class ServerUsersNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer, self).__init__(", "**kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] def _handle_specifics_postsub(self, cfg,", "mail address is explicitly given, check if mail # template", "*args, **kwargs): super(SrvRolesMembersNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerHumansNormalizer, self).__init__( pluginref, *args,", "__init__(self, pluginref, *args, **kwargs): super(SrvUsrCfgNormalizer, 
self).__init__( pluginref, *args, mapkey_lvl=-2, **kwargs", "__metaclass__ = type from ansible.errors import AnsibleOptionsError from ansible.module_utils.six import", "cfg, my_subcfg, cfgpath_abs): mail = my_subcfg.get('email', None) if not mail:", "return ['users'] class ServerUsrBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms", "## subrole parent = get_subdict(cfg, cfgpath_abs[:-2]) parent = parent['role_abspath'] my_subcfg['role_abspath']", "mapkey_lvl=-2, **kwargs ) self.default_setters['name'] = DefaultSetterOtherKey('username') @property def config_path(self): return", "SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "*args, **kwargs ) class ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs):", "*args, **kwargs): super(SrvUsrCfgNormalizer, self).__init__( pluginref, *args, mapkey_lvl=-2, **kwargs ) self.default_setters['name']", "['roles', 'subroles'] \\ + urole['path'].replace('/', '/subroles/').split('/') tmp = get_subdict(cfg, tmp,", "(k,ul) in iteritems(my_subcfg): for u in ul: exportcfg.append({ 'gitlab_group': my_group,", "ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY, get_subdict from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert def", "def config_path(self): return ['users'] class ServerUsrBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True) ) super(SrvUsrNormalizer, self).__init__( pluginref, *args,", "**kwargs): super(ServerBotsNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "ServerUsrBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', [])", "# 
template is specified for server, if so use this", "kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer, self).__init__(", "class SrvUsrNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "cfgpath_abs[:-2]) parent = parent['role_abspath'] my_subcfg['role_abspath'] = parent + [c['name']] c['parent']", "ConfigRootNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80) )", "= get_subdict(cfg, cfgpath_abs[:-2]) parent = parent['role_abspath'] my_subcfg['role_abspath'] = parent +", "['members'] def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): if not my_subcfg: return", "cfg, my_subcfg, cfgpath_abs): usr_roles = my_subcfg.get('roles', None) if usr_roles: for", "pluginref, *args, **kwargs ) @property def config_path(self): return ['instances'] class", "subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ]", "def __init__(self, pluginref, *args, **kwargs): super(ServerBotsNormalizer, self).__init__( pluginref, *args, **kwargs", "super(SrvUsrCfgNormalizer, self).__init__( pluginref, *args, mapkey_lvl=-2, **kwargs ) self.default_setters['name'] = DefaultSetterOtherKey('username')", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return ['roles']", "**kwargs ) self.default_setters['name'] = DefaultSetterOtherKey('username') @property def config_path(self): return ['config']", "@property def config_path(self): return ['servers'] class ServerInstancesNormalizer(NormalizerBase): def __init__(self, pluginref,", "**kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase): def", "defval={}) setdefault_none(c, 'name', defval=cfgpath_abs[-1]) # 
build role hierarchy path and", "def __init__(self, pluginref, *args, **kwargs): super(SrvRoleInstNormalizer, self).__init__( pluginref, *args, **kwargs", "None) if not mail: # if not mail address is", "server, if so use this to # create address with", "u in ul: exportcfg.append({ 'gitlab_group': my_group, 'gitlab_user': u, 'access_level': k", "}) my_subcfg['_exportcfg'] = exportcfg return my_subcfg class ServerUsersNormalizer(NormalizerBase): def __init__(self,", "parent = parent['role_abspath'] my_subcfg['role_abspath'] = parent + [c['name']] c['parent'] =", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref),", "given, check if mail # template is specified for server,", "['servers'] class ServerInstancesNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms =", "urole, cfg): tmp = ['roles', 'subroles'] \\ + urole['path'].replace('/', '/subroles/').split('/')", "my_subcfg: return my_subcfg ## if it exists, members should be", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ ServersNormalizer(pluginref), ] super(ConfigRootNormalizer, self).__init__(pluginref,", "pluginref, *args, **kwargs ) @property def config_path(self): return ['members'] def", "AnsibleOptionsError from ansible.module_utils.six import iteritems, string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger,", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrCfgNormalizer(pluginref),", "subnorms += [ SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer, self).__init__( pluginref, *args, **kwargs", "mail: # if not mail address is explicitly given, check", "# do config subkey c = setdefault_none(my_subcfg, 'config', defval={}) setdefault_none(c,", "list of users exportcfg = [] my_group = self.get_parentcfg(cfg, cfgpath_abs)", "'access_level': k 
}) my_subcfg['_exportcfg'] = exportcfg return my_subcfg class ServerUsersNormalizer(NormalizerBase):", ") @property def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesMembersNormalizer(NormalizerBase): def __init__(self,", "self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY]", "[]) subnorms += [ SrvUsrNormalizer(pluginref), ] super(ServerUsrBaseNormalizer, self).__init__( pluginref, *args,", "a dict where the keys are ## valid gitlab access", "[]) subnorms += [ ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer, self).__init__( pluginref,", "= parent + [c['name']] c['parent'] = '/'.join(parent) return my_subcfg class", "so use this to # create address with username as", "sub normalizers can only ## be instantiated if the corresponding", "name_key(self): return 'username' def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): mail =", "[SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms =", "corresponding key actually exists ## to avoid indefinite recursions of", "class SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRoleInstNormalizer, self).__init__( pluginref,", "c = setdefault_none(my_subcfg, 'config', defval={}) setdefault_none(c, 'name', defval=cfgpath_abs[-1]) # build", "pluginref, *args, **kwargs): self._add_defaultsetter(kwargs, 'random_pwlen', DefaultSetterConstant(80) ) subnorms = kwargs.setdefault('sub_normalizers',", "= tmp.format( my_subcfg['username'].replace('_', '-') ) return my_subcfg class ActionModule(ConfigNormalizerBaseMerger): def", "[SUBDICT_METAKEY_ANY] class SrvRolesMembersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesMembersNormalizer, self).__init__(", "## if it exists, members should be a dict where", "my_subcfg.get('email', None) if not mail: # if 
not mail address", "level=3 ).get('mail_template', None) if tmp: my_subcfg['email'] = tmp.format( my_subcfg['username'].replace('_', '-')", "@property def config_path(self): return [SUBDICT_METAKEY_ANY] class SrvRolesBaseNormalizer(NormalizerBase): def __init__(self, pluginref,", "hierarchy path and parent if cfgpath_abs[-1] == 'roles': ## top", "role hierarchy path and parent if cfgpath_abs[-1] == 'roles': ##", "] super(SrvRolesBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) def _handle_specifics_presub(self, cfg,", "from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert def user_role_to_cfg(username, urole, cfg): tmp =", "['config'] @property def name_key(self): return 'username' def _handle_specifics_presub(self, cfg, my_subcfg,", "*args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ ServerInstancesNormalizer(pluginref),", "def _handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): if not my_subcfg: return my_subcfg", "(absolute_import, division, print_function) __metaclass__ = type from ansible.errors import AnsibleOptionsError", "*args, default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs ) self._supports_check_mode = False self._supports_async =", "= kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer, self).__init__(", "# if not mail address is explicitly given, check if", "*args, **kwargs): super(SrvRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "+ urole['path'].replace('/', '/subroles/').split('/') tmp = get_subdict(cfg, tmp, default_empty=True) setdefault_none(setdefault_none(tmp, 'members',", ") super(SrvUsrNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "__init__(self, *args, **kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args, 
default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs )", "kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer, self).__init__( pluginref,", "ActionModule(ConfigNormalizerBaseMerger): def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args, default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'],", "from ansible.module_utils.six import iteritems, string_types from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase,", "@property def config_path(self): return ['roles'] class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH = ['subroles']", "values should be a list of users exportcfg = []", "*args, **kwargs ) @property def config_path(self): return ['humans'] class SrvUsrNormalizer(NormalizerBase):", "'roles': ## top level parent = [] else: ## subrole", "use this to # create address with username as param", "my_subcfg['username'].replace('_', '-') ) return my_subcfg class ActionModule(ConfigNormalizerBaseMerger): def __init__(self, *args,", "tmp = self.get_parentcfg( cfg, cfgpath_abs, level=3 ).get('mail_template', None) if tmp:", "SrvRolesNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRolesNormalizer, self).__init__( pluginref, *args,", "[]) subnorms += [ SrvRolesMembersNormalizer(pluginref), ## note: for recursive structures,", "parent if cfgpath_abs[-1] == 'roles': ## top level parent =", "param tmp = self.get_parentcfg( cfg, cfgpath_abs, level=3 ).get('mail_template', None) if", "**kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args, default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs ) self._supports_check_mode =", "structures, the sub normalizers 
can only ## be instantiated if", "**kwargs ) @property def config_path(self): return ['humans'] class SrvUsrNormalizer(NormalizerBase): def", "pluginref, *args, **kwargs ) @property def config_path(self): return [SUBDICT_METAKEY_ANY] class", "extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs ) self._supports_check_mode = False self._supports_async = False @property", "pluginref, *args, **kwargs): super(ServerBotsNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "return my_subcfg class ServerUsersNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms", "if not mail address is explicitly given, check if mail", "cfgpath_abs): usr_roles = my_subcfg.get('roles', None) if usr_roles: for ur in", "setdefault_none(my_subcfg, 'config', defval={}) setdefault_none(c, 'name', defval=cfgpath_abs[-1]) # build role hierarchy", "super(ServerBotsNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "= self.get_parentcfg(cfg, cfgpath_abs) my_group = '/'.join(my_group['role_abspath']) for (k,ul) in iteritems(my_subcfg):", "if the corresponding key actually exists ## to avoid indefinite", "def __init__(self, pluginref, *args, **kwargs): super(ServerHumansNormalizer, self).__init__( pluginref, *args, **kwargs", "= False self._supports_async = False @property def my_ansvar(self): return 'gitlab_cfg'", "*args, mapkey_lvl=-2, **kwargs ) self.default_setters['name'] = DefaultSetterOtherKey('username') @property def config_path(self):", "None) if usr_roles: for ur in usr_roles: user_role_to_cfg(my_subcfg['config']['username'], ur, self.get_parentcfg(cfg,", "[ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer, self).__init__( pluginref, *args, **kwargs )", "# create address with username as param tmp = self.get_parentcfg(", "can only ## be instantiated if the corresponding key actually", ") @property def config_path(self): return ['roles'] 
class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH =", "from ansible_collections.smabot.base.plugins.module_utils.plugins.config_normalizing.base import ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting", "the keys are ## valid gitlab access levels (like guest", "my_subcfg class SrvUsrCfgNormalizer(NormalizerNamed): def __init__(self, pluginref, *args, **kwargs): super(SrvUsrCfgNormalizer, self).__init__(", "# build role hierarchy path and parent if cfgpath_abs[-1] ==", "= parent['role_abspath'] my_subcfg['role_abspath'] = parent + [c['name']] c['parent'] = '/'.join(parent)", "instantiated if the corresponding key actually exists ## to avoid", "[]) subnorms += [ SrvUsrCfgNormalizer(pluginref), ] self._add_defaultsetter(kwargs, 'pw_access', DefaultSetterConstant(True) )", "**kwargs ) @property def config_path(self): return ['roles'] class SrvSubRolesNormalizer(NormalizerBase): NORMER_CONFIG_PATH", "my_subcfg['role_abspath'] = parent + [c['name']] c['parent'] = '/'.join(parent) return my_subcfg", "u, 'access_level': k }) my_subcfg['_exportcfg'] = exportcfg return my_subcfg class", "the sub normalizers can only ## be instantiated if the", "to avoid indefinite recursions of death (SrvSubRolesNormalizer, True), ] super(SrvRolesBaseNormalizer,", "= '/'.join(my_group['role_abspath']) for (k,ul) in iteritems(my_subcfg): for u in ul:", "is specified for server, if so use this to #", "return ['servers'] class ServerInstancesNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms", "**kwargs ) @property def config_path(self): return ['instances'] class SrvInstNormalizer(NormalizerBase): def", "pluginref, *args, mapkey_lvl=-2, **kwargs ) self.default_setters['name'] = DefaultSetterOtherKey('username') @property def", "*args, **kwargs): super(SrvRoleInstNormalizer, self).__init__( pluginref, *args, **kwargs ) 
@property def", "= setdefault_none(my_subcfg, 'config', defval={}) setdefault_none(c, 'name', defval=cfgpath_abs[-1]) # build role", "class SrvInstNormalizer(NormalizerBase): def __init__(self, pluginref, *args, **kwargs): subnorms = kwargs.setdefault('sub_normalizers',", "super(SrvRolesBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) def _handle_specifics_presub(self, cfg, my_subcfg,", "+= [ SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer, self).__init__( pluginref, *args, **kwargs )", "## top level parent = [] else: ## subrole parent", "self).__init__( pluginref, *args, **kwargs ) class ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref,", "mail # template is specified for server, if so use", "return my_subcfg ## if it exists, members should be a", "ur, self.get_parentcfg(cfg, cfgpath_abs, level=3) ) return my_subcfg class SrvUsrCfgNormalizer(NormalizerNamed): def", "super(ConfigRootNormalizer, self).__init__(pluginref, *args, **kwargs) class ServersNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "**kwargs): super(SrvRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self):", "ConfigNormalizerBaseMerger, NormalizerBase, NormalizerNamed, DefaultSetterConstant, DefaultSetterOtherKey from ansible_collections.smabot.base.plugins.module_utils.utils.dicting import setdefault_none, SUBDICT_METAKEY_ANY,", "super(SrvRoleInstNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def config_path(self): return", "in iteritems(my_subcfg): for u in ul: exportcfg.append({ 'gitlab_group': my_group, 'gitlab_user':", ") @property def config_path(self): return ['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self,", "parent['role_abspath'] my_subcfg['role_abspath'] = parent + [c['name']] c['parent'] = '/'.join(parent) return", "_handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): # do config subkey c =", "= ['roles', 
'subroles'] \\ + urole['path'].replace('/', '/subroles/').split('/') tmp = get_subdict(cfg,", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvUsrNormalizer(pluginref), ]", "cfgpath_abs): # do config subkey c = setdefault_none(my_subcfg, 'config', defval={})", "ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerBotsNormalizer, self).__init__( pluginref, *args,", "DefaultSetterConstant(True) ) super(SrvUsrNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "_handle_specifics_presub(self, cfg, my_subcfg, cfgpath_abs): mail = my_subcfg.get('email', None) if not", "build role hierarchy path and parent if cfgpath_abs[-1] == 'roles':", "*args, **kwargs ) @property def config_path(self): return ['users'] class ServerUsrBaseNormalizer(NormalizerBase):", "SrvRoleInstNormalizer(SrvRolesBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(SrvRoleInstNormalizer, self).__init__( pluginref, *args,", "{}), urole['level'], [] ).append(username) class ConfigRootNormalizer(NormalizerBase): def __init__(self, pluginref, *args,", "if it exists, members should be a dict where the", "super(ServerUsrBaseNormalizer, self).__init__( pluginref, *args, **kwargs ) class ServerBotsNormalizer(ServerUsrBaseNormalizer): def __init__(self,", "my_subcfg['email'] = tmp.format( my_subcfg['username'].replace('_', '-') ) return my_subcfg class ActionModule(ConfigNormalizerBaseMerger):", "**kwargs): subnorms = kwargs.setdefault('sub_normalizers', []) subnorms += [ SrvRolesMembersNormalizer(pluginref), ##", "user_role_to_cfg(username, urole, cfg): tmp = ['roles', 'subroles'] \\ + urole['path'].replace('/',", "actually exists ## to avoid indefinite recursions of death (SrvSubRolesNormalizer,", "my_subcfg, cfgpath_abs): if not my_subcfg: return my_subcfg ## if it", "subnorms += [ ServerBotsNormalizer(pluginref), ServerHumansNormalizer(pluginref), ] super(ServerUsersNormalizer, 
self).__init__( pluginref, *args,", "SrvInstNormalizer(pluginref), ] super(ServerInstancesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "['bots'] class ServerHumansNormalizer(ServerUsrBaseNormalizer): def __init__(self, pluginref, *args, **kwargs): super(ServerHumansNormalizer, self).__init__(", "config subkey c = setdefault_none(my_subcfg, 'config', defval={}) setdefault_none(c, 'name', defval=cfgpath_abs[-1])", "def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(ConfigRootNormalizer(self), *args, default_merge_vars=['gitlab_cfg_defaults'], extra_merge_vars_ans=['extra_gitlab_config_maps'], **kwargs", "= DefaultSetterOtherKey('username') @property def config_path(self): return ['config'] @property def name_key(self):", "'/'.join(my_group['role_abspath']) for (k,ul) in iteritems(my_subcfg): for u in ul: exportcfg.append({", "ServerUsersNormalizer(pluginref), SrvRolesNormalizer(pluginref), ] super(SrvInstNormalizer, self).__init__( pluginref, *args, **kwargs ) @property", "tmp.format( my_subcfg['username'].replace('_', '-') ) return my_subcfg class ActionModule(ConfigNormalizerBaseMerger): def __init__(self,", "normalizers can only ## be instantiated if the corresponding key", "*args, **kwargs ) @property def config_path(self): return type(self).NORMER_CONFIG_PATH class SrvRoleInstNormalizer(SrvRolesBaseNormalizer):", "SrvRoleInstNormalizer(pluginref), ] super(SrvSubRolesNormalizer, self).__init__( pluginref, *args, **kwargs ) @property def", "tmp = ['roles', 'subroles'] \\ + urole['path'].replace('/', '/subroles/').split('/') tmp =", "[]) subnorms += [ ServerInstancesNormalizer(pluginref), ] super(ServersNormalizer, self).__init__( pluginref, *args," ]
[ "conexao com DynamoDB\") logging.CRITICAL(\"Problema na conexao com DynamoDB\") return False", "DyConnect: def __init__(self, table, region): self.table = table self.region =", "Dynamodb \"\"\" import boto3 import botocore import logging import datetime", "'details' in stages['Item']: return stages['Item']['details'] return False def get_sharedlibrary_release(): newtemplate", "def connect(self): try: dydb = boto3.resource('dynamodb', region_name=self.region) conn = dydb.Table(self.table)", "dados): conn = self.connect() if conn: retorno = conn.put_item(Item=dados) def", "query): conn = self.connect() return conn.get_item(Key=query) def get_dy_template(template_name): newtemplate =", "print(\"Problema na conexao com DynamoDB\") logging.CRITICAL(\"Problema na conexao com DynamoDB\")", "conn: retorno = conn.put_item(Item=dados) def dynamodb_query(self, query): conn = self.connect()", "return stages['Item']['details'] return False def get_sharedlibrary_release(): newtemplate = DyConnect(dynamodb['template'], aws_region)", "de integração com o Dynamodb \"\"\" import boto3 import botocore", "= region def connect(self): try: dydb = boto3.resource('dynamodb', region_name=self.region) conn", "get_sharedlibrary_release(): newtemplate = DyConnect(dynamodb['template'], aws_region) query = {'name': 'sharedlibrary'} version", "= DyConnect(dynamodb['template'], aws_region) query = {'name': 'sharedlibrary'} version = newtemplate.dynamodb_query(query)", "def dynamodb_query(self, query): conn = self.connect() return conn.get_item(Key=query) def get_dy_template(template_name):", "self.table = table self.region = region def connect(self): try: dydb", "time import os class DyConnect: def __init__(self, table, region): self.table", "= self.connect() return conn.get_item(Key=query) def get_dy_template(template_name): newtemplate = DyConnect(dynamodb['template'], aws_region)", "self.connect() return conn.get_item(Key=query) def get_dy_template(template_name): newtemplate = 
DyConnect(dynamodb['template'], aws_region) query", "import copy import time import os class DyConnect: def __init__(self,", "<filename>microservices/validate/tools/dynamodb.py \"\"\" Tools de integração com o Dynamodb \"\"\" import", "False def get_sharedlibrary_release(): newtemplate = DyConnect(dynamodb['template'], aws_region) query = {'name':", "__init__(self, table, region): self.table = table self.region = region def", "def dynamodb_save(self, dados): conn = self.connect() if conn: retorno =", "class DyConnect: def __init__(self, table, region): self.table = table self.region", "\"\"\" import boto3 import botocore import logging import datetime import", "def __init__(self, table, region): self.table = table self.region = region", "connect(self): try: dydb = boto3.resource('dynamodb', region_name=self.region) conn = dydb.Table(self.table) return", "in stages: if 'details' in stages['Item']: return stages['Item']['details'] return False", "datetime import json import copy import time import os class", "import botocore import logging import datetime import json import copy", "in stages['Item']: return stages['Item']['details'] return False def get_sharedlibrary_release(): newtemplate =", "conn.put_item(Item=dados) def dynamodb_query(self, query): conn = self.connect() return conn.get_item(Key=query) def", "\"\"\" Tools de integração com o Dynamodb \"\"\" import boto3", "aws_region) query = {'name': template_name} stages = newtemplate.dynamodb_query(query) if 'Item'", "os class DyConnect: def __init__(self, table, region): self.table = table", "self.region = region def connect(self): try: dydb = boto3.resource('dynamodb', region_name=self.region)", "return conn.get_item(Key=query) def get_dy_template(template_name): newtemplate = DyConnect(dynamodb['template'], aws_region) query =", "if 'details' in stages['Item']: return stages['Item']['details'] return False def get_sharedlibrary_release():", "com DynamoDB\") return False def dynamodb_save(self, dados): conn = 
self.connect()", "import boto3 import botocore import logging import datetime import json", "stages['Item']: return stages['Item']['details'] return False def get_sharedlibrary_release(): newtemplate = DyConnect(dynamodb['template'],", "region): self.table = table self.region = region def connect(self): try:", "dynamodb_query(self, query): conn = self.connect() return conn.get_item(Key=query) def get_dy_template(template_name): newtemplate", "stages = newtemplate.dynamodb_query(query) if 'Item' in stages: if 'details' in", "com DynamoDB\") logging.CRITICAL(\"Problema na conexao com DynamoDB\") return False def", "def get_sharedlibrary_release(): newtemplate = DyConnect(dynamodb['template'], aws_region) query = {'name': 'sharedlibrary'}", "com o Dynamodb \"\"\" import boto3 import botocore import logging", "query = {'name': 'sharedlibrary'} version = newtemplate.dynamodb_query(query) if 'Item' in", "import os class DyConnect: def __init__(self, table, region): self.table =", "conn = self.connect() if conn: retorno = conn.put_item(Item=dados) def dynamodb_query(self,", "self.connect() if conn: retorno = conn.put_item(Item=dados) def dynamodb_query(self, query): conn", "na conexao com DynamoDB\") logging.CRITICAL(\"Problema na conexao com DynamoDB\") return", "newtemplate = DyConnect(dynamodb['template'], aws_region) query = {'name': 'sharedlibrary'} version =", "o Dynamodb \"\"\" import boto3 import botocore import logging import", "= {'name': template_name} stages = newtemplate.dynamodb_query(query) if 'Item' in stages:", "def get_dy_template(template_name): newtemplate = DyConnect(dynamodb['template'], aws_region) query = {'name': template_name}", "'sharedlibrary'} version = newtemplate.dynamodb_query(query) if 'Item' in version: return version['Item']['release']", "retorno = conn.put_item(Item=dados) def dynamodb_query(self, query): conn = self.connect() return", "get_dy_template(template_name): newtemplate = DyConnect(dynamodb['template'], aws_region) query = 
{'name': template_name} stages", "query = {'name': template_name} stages = newtemplate.dynamodb_query(query) if 'Item' in", "import logging import datetime import json import copy import time", "aws_region) query = {'name': 'sharedlibrary'} version = newtemplate.dynamodb_query(query) if 'Item'", "na conexao com DynamoDB\") return False def dynamodb_save(self, dados): conn", "return False def get_sharedlibrary_release(): newtemplate = DyConnect(dynamodb['template'], aws_region) query =", "conn = dydb.Table(self.table) return conn except: print(\"Problema na conexao com", "logging import datetime import json import copy import time import", "= DyConnect(dynamodb['template'], aws_region) query = {'name': template_name} stages = newtemplate.dynamodb_query(query)", "table, region): self.table = table self.region = region def connect(self):", "= boto3.resource('dynamodb', region_name=self.region) conn = dydb.Table(self.table) return conn except: print(\"Problema", "Tools de integração com o Dynamodb \"\"\" import boto3 import", "import datetime import json import copy import time import os", "newtemplate = DyConnect(dynamodb['template'], aws_region) query = {'name': template_name} stages =", "logging.CRITICAL(\"Problema na conexao com DynamoDB\") return False def dynamodb_save(self, dados):", "False def dynamodb_save(self, dados): conn = self.connect() if conn: retorno", "= conn.put_item(Item=dados) def dynamodb_query(self, query): conn = self.connect() return conn.get_item(Key=query)", "if 'Item' in stages: if 'details' in stages['Item']: return stages['Item']['details']", "conn.get_item(Key=query) def get_dy_template(template_name): newtemplate = DyConnect(dynamodb['template'], aws_region) query = {'name':", "conn = self.connect() return conn.get_item(Key=query) def get_dy_template(template_name): newtemplate = DyConnect(dynamodb['template'],", "if conn: retorno = conn.put_item(Item=dados) def dynamodb_query(self, query): conn =", "version = 
newtemplate.dynamodb_query(query) if 'Item' in version: return version['Item']['release'] return", "json import copy import time import os class DyConnect: def", "region def connect(self): try: dydb = boto3.resource('dynamodb', region_name=self.region) conn =", "DynamoDB\") logging.CRITICAL(\"Problema na conexao com DynamoDB\") return False def dynamodb_save(self,", "= dydb.Table(self.table) return conn except: print(\"Problema na conexao com DynamoDB\")", "DyConnect(dynamodb['template'], aws_region) query = {'name': 'sharedlibrary'} version = newtemplate.dynamodb_query(query) if", "botocore import logging import datetime import json import copy import", "dynamodb_save(self, dados): conn = self.connect() if conn: retorno = conn.put_item(Item=dados)", "{'name': 'sharedlibrary'} version = newtemplate.dynamodb_query(query) if 'Item' in version: return", "stages['Item']['details'] return False def get_sharedlibrary_release(): newtemplate = DyConnect(dynamodb['template'], aws_region) query", "region_name=self.region) conn = dydb.Table(self.table) return conn except: print(\"Problema na conexao", "template_name} stages = newtemplate.dynamodb_query(query) if 'Item' in stages: if 'details'", "table self.region = region def connect(self): try: dydb = boto3.resource('dynamodb',", "DyConnect(dynamodb['template'], aws_region) query = {'name': template_name} stages = newtemplate.dynamodb_query(query) if", "DynamoDB\") return False def dynamodb_save(self, dados): conn = self.connect() if", "copy import time import os class DyConnect: def __init__(self, table,", "= self.connect() if conn: retorno = conn.put_item(Item=dados) def dynamodb_query(self, query):", "stages: if 'details' in stages['Item']: return stages['Item']['details'] return False def", "boto3 import botocore import logging import datetime import json import", "= newtemplate.dynamodb_query(query) if 'Item' in version: return version['Item']['release'] return False", "dydb.Table(self.table) return conn except: 
print(\"Problema na conexao com DynamoDB\") logging.CRITICAL(\"Problema", "conexao com DynamoDB\") return False def dynamodb_save(self, dados): conn =", "return conn except: print(\"Problema na conexao com DynamoDB\") logging.CRITICAL(\"Problema na", "except: print(\"Problema na conexao com DynamoDB\") logging.CRITICAL(\"Problema na conexao com", "integração com o Dynamodb \"\"\" import boto3 import botocore import", "import json import copy import time import os class DyConnect:", "try: dydb = boto3.resource('dynamodb', region_name=self.region) conn = dydb.Table(self.table) return conn", "conn except: print(\"Problema na conexao com DynamoDB\") logging.CRITICAL(\"Problema na conexao", "'Item' in stages: if 'details' in stages['Item']: return stages['Item']['details'] return", "{'name': template_name} stages = newtemplate.dynamodb_query(query) if 'Item' in stages: if", "import time import os class DyConnect: def __init__(self, table, region):", "= table self.region = region def connect(self): try: dydb =", "return False def dynamodb_save(self, dados): conn = self.connect() if conn:", "newtemplate.dynamodb_query(query) if 'Item' in stages: if 'details' in stages['Item']: return", "= {'name': 'sharedlibrary'} version = newtemplate.dynamodb_query(query) if 'Item' in version:", "dydb = boto3.resource('dynamodb', region_name=self.region) conn = dydb.Table(self.table) return conn except:", "boto3.resource('dynamodb', region_name=self.region) conn = dydb.Table(self.table) return conn except: print(\"Problema na", "= newtemplate.dynamodb_query(query) if 'Item' in stages: if 'details' in stages['Item']:" ]
[ "scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider): name = 'glided_sky_001' description =", "爬虫-基础1' start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings = { 'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES':", "description = 'GlidedSky 爬虫-基础1' start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings = {", "from scrapy import Request from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider):", "= 'GlidedSky 爬虫-基础1' start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings = { 'COOKIES_ENABLED':", "callback=self.parse) def parse(self, response, **kwargs): all_number = [int(i) for i", "class GlidedSky001Spider(DdiyBaseSpider): name = 'glided_sky_001' description = 'GlidedSky 爬虫-基础1' start_url", "import Request from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider): name =", "'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings = { 'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589,", "'GlidedSky 爬虫-基础1' start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings = { 'COOKIES_ENABLED': True,", "'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, }, } def start_requests(self):", "True, 'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, }, } def start_requests(self): yield", "} def start_requests(self): yield Request(url=self.start_url, callback=self.parse) def parse(self, response, **kwargs):", "}, } def start_requests(self): yield Request(url=self.start_url, callback=self.parse) def parse(self, response,", "'glided_sky_001' description = 'GlidedSky 爬虫-基础1' start_url = 
'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings =", "for i in response.xpath('//div[@class=\"card-body\"]//div[@class=\"col-md-1\"]/text()').getall()] self.logger.info(f'Sum or web number is {sum(all_number)}')", "all_number = [int(i) for i in response.xpath('//div[@class=\"card-body\"]//div[@class=\"col-md-1\"]/text()').getall()] self.logger.info(f'Sum or web", "from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider): name = 'glided_sky_001' description", "import DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider): name = 'glided_sky_001' description = 'GlidedSky", "utf-8 -*- from scrapy import Request from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider", "start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings = { 'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES': {", "-*- coding: utf-8 -*- from scrapy import Request from scrapy_ddiy.utils.spiders.ddiy_base", "DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider): name = 'glided_sky_001' description = 'GlidedSky 爬虫-基础1'", "def start_requests(self): yield Request(url=self.start_url, callback=self.parse) def parse(self, response, **kwargs): all_number", "coding: utf-8 -*- from scrapy import Request from scrapy_ddiy.utils.spiders.ddiy_base import", "-*- from scrapy import Request from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider class", "name = 'glided_sky_001' description = 'GlidedSky 爬虫-基础1' start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1'", "start_requests(self): yield Request(url=self.start_url, callback=self.parse) def parse(self, response, **kwargs): all_number =", "parse(self, response, **kwargs): all_number = [int(i) for i in response.xpath('//div[@class=\"card-body\"]//div[@class=\"col-md-1\"]/text()').getall()]", "'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, }, } def start_requests(self): yield 
Request(url=self.start_url,", "= { 'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, }, }", "yield Request(url=self.start_url, callback=self.parse) def parse(self, response, **kwargs): all_number = [int(i)", "**kwargs): all_number = [int(i) for i in response.xpath('//div[@class=\"card-body\"]//div[@class=\"col-md-1\"]/text()').getall()] self.logger.info(f'Sum or", "= [int(i) for i in response.xpath('//div[@class=\"card-body\"]//div[@class=\"col-md-1\"]/text()').getall()] self.logger.info(f'Sum or web number", "= 'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings = { 'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware':", "custom_settings = { 'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, },", "GlidedSky001Spider(DdiyBaseSpider): name = 'glided_sky_001' description = 'GlidedSky 爬虫-基础1' start_url =", "def parse(self, response, **kwargs): all_number = [int(i) for i in", "Request(url=self.start_url, callback=self.parse) def parse(self, response, **kwargs): all_number = [int(i) for", "589, }, } def start_requests(self): yield Request(url=self.start_url, callback=self.parse) def parse(self,", "'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, }, } def start_requests(self): yield Request(url=self.start_url, callback=self.parse) def", "[int(i) for i in response.xpath('//div[@class=\"card-body\"]//div[@class=\"col-md-1\"]/text()').getall()] self.logger.info(f'Sum or web number is", "{ 'COOKIES_ENABLED': True, 'DOWNLOADER_MIDDLEWARES': { 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, }, } def", "# -*- coding: utf-8 -*- from scrapy import Request from", "Request from scrapy_ddiy.utils.spiders.ddiy_base import 
DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider): name = 'glided_sky_001'", "{ 'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589, }, } def start_requests(self): yield Request(url=self.start_url, callback=self.parse)", "scrapy import Request from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider class GlidedSky001Spider(DdiyBaseSpider): name", "response, **kwargs): all_number = [int(i) for i in response.xpath('//div[@class=\"card-body\"]//div[@class=\"col-md-1\"]/text()').getall()] self.logger.info(f'Sum", "= 'glided_sky_001' description = 'GlidedSky 爬虫-基础1' start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1' custom_settings" ]
[ "attribute is \"shape\" and there are three possible shapes, then", "\"iid\" or \"comp\" dataset_dir: The base directory of the dataset", "2.0 (the \"License\"); # you may not use this file", "instructions for reading 2D shapes data. Args: split_name: A train/test", "For example, if the first attribute is \"shape\" and there", "the Quantitative Analysis of Deep Belief Networks.'' In Proceedings of", "None or dataset_dir == '': dataset_dir = _DATASET_DIR # Load", "and placed around one of four locations/regions in the canvas).", "an augmented version of the MNIST dataset which contains the", "limitations under the License. r\"\"\"Provides data for the mnist with", "of the MNIST dataset which contains the following attributes: 1.", "not in _SPLITS_TO_SIZES: raise ValueError('split name %s was not recognized.'", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "location is a gaussian placed at four quadrants of the", "__future__ import division from __future__ import print_function import os import", "and 0.9 +- 0.1 repsectively. 3. Orientation: we vary orientation", "tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), } # TODO(vrama): See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For", "in the following paper: Salakhutdinov, Ruslan, and <NAME>. 2008. ``On", "+30 +- 10 and -30 +-10. On a third of", "num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE if dataset_dir is None or dataset_dir ==", "four quadrants of the canvas. 2. Scale (We vary scale", "sampling actual values from gaussians at +30 +- 10 and", "is \"shape\" and there are three possible shapes, then then", "(We vary scale from 0.4 to 1.0), with two gaussians", "a value 3 in the first index, and so on.", "file. 
label_map_json = os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern = os.path.join(dataset_dir, _FILE_PATTERN %", "of the occasions we dont orient the digit at all", "import print_function import os import tensorflow as tf from tensorflow.contrib.slim.python.slim.data", "split_name) if split_type is not \"iid\": raise ValueError(\"Only IID split", "to preprocess the images which # get loaded. items_to_handlers =", "at four quadrants of the canvas. 2. Scale (We vary", "Orientation: we vary orientation from -90 to +90 degrees, sampling", "use this file except in compliance with the License. #", "'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), } # TODO(vrama): See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 #", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "of 0 degrees. The original data after transformations is binarized", "reading 2D shapes data. Args: split_name: A train/test split name.", "License. # You may obtain a copy of the License", "from datasets.celeba.image_decoder import ImageDecodeProcess # Only provides option to load", "from tensorflow.contrib.slim.python.slim.data import tfexample_decoder from datasets.celeba.image_decoder import ImageDecodeProcess # Only", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "num_classes_per_attribute: The number of labels for the classfication problem corresponding", "{ 'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'), } decoder", "the following attributes: 1. 
Location (digits are translated on a", "would need to be made to preprocess the images which", "'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''), 'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)],", "about the dataset we just constructed. Raises: ValueError: if `split_name`", "0.9 +- 0.1 repsectively. 3. Orientation: we vary orientation from", "option to load the binarized version of the dataset. _FILE_PATTERN", "binarized version of the dataset. _FILE_PATTERN = '%s-*' _SPLIT_TYPE =", "+-10. On a third of the occasions we dont orient", "transformations is binarized as per the procedure described in the", "as per the procedure described in the following paper: Salakhutdinov,", "x 3] RGB image.', 'labels': 'Attributes corresponding to the image.',", "'Attributes corresponding to the image.', } _NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18) def", "from __future__ import absolute_import from __future__ import division from __future__", "in compliance with the License. # You may obtain a", "+- 0.1 repsectively. 3. Orientation: we vary orientation from -90", "gaussians at +30 +- 10 and -30 +-10. On a", "tuple with instructions for reading 2D shapes data. Args: split_name:", "dataset which contains the following attributes: 1. Location (digits are", "software # distributed under the License is distributed on an", "actual values from gaussians at +30 +- 10 and -30", "2. 
Scale (We vary scale from 0.4 to 1.0), with", "from tensorflow.contrib.slim.python.slim.data import dataset from tensorflow.contrib.slim.python.slim.data import tfexample_decoder from datasets.celeba.image_decoder", "tf.FixedLenFeature((), tf.string, default_value=''), 'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64),", "default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), } # TODO(vrama): See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270", "split_type=\"iid\", dataset_dir=None, image_length=64, num_classes_per_attribute=None): \"\"\"Gets a dataset tuple with instructions", "the first attribute is \"shape\" and there are three possible", "attributes: 1. Location (digits are translated on a canvas and", "0.4 to 1.0), with two gaussians placed at 0.5 +-", "problem corresponding to each attribute. For example, if the first", "a third of the occasions we dont orient the digit", "1. Location (digits are translated on a canvas and placed", "0 degrees. The original data after transformations is binarized as", "the classfication problem corresponding to each attribute. For example, if", "map file. label_map_json = os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern = os.path.join(dataset_dir, _FILE_PATTERN", "the images which # get loaded. items_to_handlers = { 'image':", "split_type is not \"iid\": raise ValueError(\"Only IID split available for", "_ITEMS_TO_DESCRIPTIONS = { 'image': 'A [218 x 178 x 3]", "_FILE_PATTERN = '%s-*' _SPLIT_TYPE = 'iid' _DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES", "image.', } _NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18) def get_split(split_name='train', split_type=\"iid\", dataset_dir=None, image_length=64,", "of the canvas. 2. 
Scale (We vary scale from 0.4", "Deep Belief Networks.'' In Proceedings of the 25th International Conference", "contains the following attributes: 1. Location (digits are translated on", "dataset_dir = _DATASET_DIR # Load attribute label map file. label_map_json", "= _DATASET_DIR # Load attribute label map file. label_map_json =", "+- 10 and -30 +-10. On a third of the", "base directory of the dataset sources. num_classes_per_attribute: The number of", "default_value=''), 'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), } #", "IID split available for CelebA.\") if num_classes_per_attribute is None: num_classes_per_attribute", "of the dataset. _FILE_PATTERN = '%s-*' _SPLIT_TYPE = 'iid' _DATASET_DIR", "for the classfication problem corresponding to each attribute. For example,", "2008. ``On the Quantitative Analysis of Deep Belief Networks.'' In", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'), } decoder =", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "are translated on a canvas and placed around one of", "changes would need to be made to preprocess the images", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "label_map_json = os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern = os.path.join(dataset_dir, _FILE_PATTERN % split_name)", "_DATASET_DIR # Load attribute label map file. 
label_map_json = os.path.join(dataset_dir,", "to in writing, software # distributed under the License is", "we dont orient the digit at all which means a", "For where changes would need to be made to preprocess", "is a gaussian placed at four quadrants of the canvas.", "there are three possible shapes, then then provide a value", "with some metadata about the dataset we just constructed. Raises:", "# See the License for the specific language governing permissions", "% (file_pattern)) keys_to_features = { 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''), 'image/format':", "'': dataset_dir = _DATASET_DIR # Load attribute label map file.", "gaussian placed at four quadrants of the canvas. 2. Scale", "_NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18) def get_split(split_name='train', split_type=\"iid\", dataset_dir=None, image_length=64, num_classes_per_attribute=None): \"\"\"Gets", "metadata about the dataset we just constructed. Raises: ValueError: if", "from %s file.' % (file_pattern)) keys_to_features = { 'image/encoded': tf.FixedLenFeature((),", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "file.' % (file_pattern)) keys_to_features = { 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),", "to the image.', } _NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18) def get_split(split_name='train', split_type=\"iid\",", "A dictionary with some metadata about the dataset we just", "(file_pattern)) keys_to_features = { 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''), 'image/format': tf.FixedLenFeature((),", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "for reading 2D shapes data. Args: split_name: A train/test split", "with the License. 
# You may obtain a copy of", "ImageDecodeProcess(shape=[218, 178, 3], image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'), } decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,", "just constructed. Raises: ValueError: if `split_name` is not a valid", "3] RGB image.', 'labels': 'Attributes corresponding to the image.', }", "in _SPLITS_TO_SIZES: raise ValueError('split name %s was not recognized.' %", "= '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES = {'train': 162770, 'val': 19867, 'test': 19962}", "is None or dataset_dir == '': dataset_dir = _DATASET_DIR #", "} _NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18) def get_split(split_name='train', split_type=\"iid\", dataset_dir=None, image_length=64, num_classes_per_attribute=None):", "a dataset tuple with instructions for reading 2D shapes data.", "os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern = os.path.join(dataset_dir, _FILE_PATTERN % split_name) tf.logging.info('Loading from", "_SPLITS_TO_SIZES = {'train': 162770, 'val': 19867, 'test': 19962} _ITEMS_TO_DESCRIPTIONS =", "A `Dataset` namedtuple. metadata: A dictionary with some metadata about", "a canvas and placed around one of four locations/regions in", "num_classes_per_attribute is None: num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE if dataset_dir is None", "compliance with the License. # You may obtain a copy", "`split_name` is not a valid train/test split. 
\"\"\" if split_name", "agreed to in writing, software # distributed under the License", "image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'), } decoder = tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata =", "tensorflow as tf from tensorflow.contrib.slim.python.slim.data import dataset from tensorflow.contrib.slim.python.slim.data import", "tuple([2]*18) def get_split(split_name='train', split_type=\"iid\", dataset_dir=None, image_length=64, num_classes_per_attribute=None): \"\"\"Gets a dataset", "+90 degrees, sampling actual values from gaussians at +30 +-", "distributed under the License is distributed on an \"AS IS\"", "vary orientation from -90 to +90 degrees, sampling actual values", "made to preprocess the images which # get loaded. items_to_handlers", "is not \"iid\": raise ValueError(\"Only IID split available for CelebA.\")", "0.1 repsectively. 3. Orientation: we vary orientation from -90 to", "version of the dataset. _FILE_PATTERN = '%s-*' _SPLIT_TYPE = 'iid'", "tf.logging.info('Loading from %s file.' % (file_pattern)) keys_to_features = { 'image/encoded':", "_SPLIT_TYPE, 'label_map_json': label_map_json, } return dataset.Dataset( data_sources=file_pattern, reader=tf.TFRecordReader, decoder=decoder, num_samples=_SPLITS_TO_SIZES[split_name],", "import division from __future__ import print_function import os import tensorflow", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "third of the occasions we dont orient the digit at", "r\"\"\"Provides data for the mnist with attributes dataset. Provide data", "locations/regions in the canvas). Each location is a gaussian placed", "19962} _ITEMS_TO_DESCRIPTIONS = { 'image': 'A [218 x 178 x", "constructed. Raises: ValueError: if `split_name` is not a valid train/test", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "placed at four quadrants of the canvas. 2. 
Scale (We", "per the procedure described in the following paper: Salakhutdinov, Ruslan,", "not use this file except in compliance with the License.", "0.1 and 0.9 +- 0.1 repsectively. 3. Orientation: we vary", "_DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES = {'train': 162770, 'val': 19867, 'test':", "on Machine Learning, 872-79. Author: vrama@ \"\"\" from __future__ import", "a valid train/test split. \"\"\" if split_name not in _SPLITS_TO_SIZES:", "_NUM_CLASSES_PER_ATTRIBUTE if dataset_dir is None or dataset_dir == '': dataset_dir", "being loaded \"iid\" or \"comp\" dataset_dir: The base directory of", "tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata = { 'num_classes_per_attribute': num_classes_per_attribute, 'split_type': _SPLIT_TYPE, 'label_map_json':", "print_function import os import tensorflow as tf from tensorflow.contrib.slim.python.slim.data import", "valid train/test split. \"\"\" if split_name not in _SPLITS_TO_SIZES: raise", "writing, software # distributed under the License is distributed on", "sources. num_classes_per_attribute: The number of labels for the classfication problem", "you may not use this file except in compliance with", "the License. r\"\"\"Provides data for the mnist with attributes dataset.", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "import ImageDecodeProcess # Only provides option to load the binarized", "get loaded. items_to_handlers = { 'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64),", "= { 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''), 'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'),", "<NAME>. 2008. ``On the Quantitative Analysis of Deep Belief Networks.''", "split_name: A train/test split name. split_type: str, type of split", "or dataset_dir == '': dataset_dir = _DATASET_DIR # Load attribute", "preprocess the images which # get loaded. 
items_to_handlers = {", "at +30 +- 10 and -30 +-10. On a third", "canvas. 2. Scale (We vary scale from 0.4 to 1.0),", "file_pattern = os.path.join(dataset_dir, _FILE_PATTERN % split_name) tf.logging.info('Loading from %s file.'", "import tfexample_decoder from datasets.celeba.image_decoder import ImageDecodeProcess # Only provides option", "CONDITIONS OF ANY KIND, either express or implied. # See", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "load the binarized version of the dataset. _FILE_PATTERN = '%s-*'", "placed at 0.5 +- 0.1 and 0.9 +- 0.1 repsectively.", "attributes dataset. Provide data loading utilities for an augmented version", "for an augmented version of the MNIST dataset which contains", "at all which means a rotation of 0 degrees. The", "tf from tensorflow.contrib.slim.python.slim.data import dataset from tensorflow.contrib.slim.python.slim.data import tfexample_decoder from", "tfexample_decoder from datasets.celeba.image_decoder import ImageDecodeProcess # Only provides option to", "A train/test split name. split_type: str, type of split being", "attribute label map file. label_map_json = os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern =", "os.path.join(dataset_dir, _FILE_PATTERN % split_name) tf.logging.info('Loading from %s file.' % (file_pattern))", "Returns: A `Dataset` namedtuple. metadata: A dictionary with some metadata", "'label_map_json': label_map_json, } return dataset.Dataset( data_sources=file_pattern, reader=tf.TFRecordReader, decoder=decoder, num_samples=_SPLITS_TO_SIZES[split_name], items_to_descriptions=_ITEMS_TO_DESCRIPTIONS),", "orient the digit at all which means a rotation of", "25th International Conference on Machine Learning, 872-79. Author: vrama@ \"\"\"", "Copyright 2017 Google Inc. # # Licensed under the Apache", "the image.', } _NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18) def get_split(split_name='train', split_type=\"iid\", dataset_dir=None,", "License. 
r\"\"\"Provides data for the mnist with attributes dataset. Provide", "keys_to_features = { 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''), 'image/format': tf.FixedLenFeature((), tf.string,", "the MNIST dataset which contains the following attributes: 1. Location", "as tf from tensorflow.contrib.slim.python.slim.data import dataset from tensorflow.contrib.slim.python.slim.data import tfexample_decoder", "OR CONDITIONS OF ANY KIND, either express or implied. #", "is None: num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE if dataset_dir is None or", "Only provides option to load the binarized version of the", "the License is distributed on an \"AS IS\" BASIS, #", "are three possible shapes, then then provide a value 3", "from gaussians at +30 +- 10 and -30 +-10. On", "The base directory of the dataset sources. num_classes_per_attribute: The number", "not a valid train/test split. \"\"\" if split_name not in", "augmented version of the MNIST dataset which contains the following", "'split_type': _SPLIT_TYPE, 'label_map_json': label_map_json, } return dataset.Dataset( data_sources=file_pattern, reader=tf.TFRecordReader, decoder=decoder,", "train/test split. \"\"\" if split_name not in _SPLITS_TO_SIZES: raise ValueError('split", "of labels for the classfication problem corresponding to each attribute.", "Conference on Machine Learning, 872-79. Author: vrama@ \"\"\" from __future__", "CelebA.\") if num_classes_per_attribute is None: num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE if dataset_dir", "the 25th International Conference on Machine Learning, 872-79. Author: vrama@", "on a canvas and placed around one of four locations/regions", "orientation from -90 to +90 degrees, sampling actual values from", "value 3 in the first index, and so on. 
Returns:", "vrama@ \"\"\" from __future__ import absolute_import from __future__ import division", "None: num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE if dataset_dir is None or dataset_dir", "law or agreed to in writing, software # distributed under", "\"shape\" and there are three possible shapes, then then provide", "data. Args: split_name: A train/test split name. split_type: str, type", "version of the MNIST dataset which contains the following attributes:", "\"\"\" from __future__ import absolute_import from __future__ import division from", "from 0.4 to 1.0), with two gaussians placed at 0.5", "labels for the classfication problem corresponding to each attribute. For", "https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For where changes would need to be made", "# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For where changes would need to be", "num_classes_per_attribute, 'split_type': _SPLIT_TYPE, 'label_map_json': label_map_json, } return dataset.Dataset( data_sources=file_pattern, reader=tf.TFRecordReader,", "not recognized.' % split_name) if split_type is not \"iid\": raise", "items_to_handlers) metadata = { 'num_classes_per_attribute': num_classes_per_attribute, 'split_type': _SPLIT_TYPE, 'label_map_json': label_map_json,", "data for the mnist with attributes dataset. Provide data loading", "under the License. r\"\"\"Provides data for the mnist with attributes", "get_split(split_name='train', split_type=\"iid\", dataset_dir=None, image_length=64, num_classes_per_attribute=None): \"\"\"Gets a dataset tuple with", "be made to preprocess the images which # get loaded.", "dataset. Provide data loading utilities for an augmented version of", "two gaussians placed at 0.5 +- 0.1 and 0.9 +-", "Salakhutdinov, Ruslan, and <NAME>. 2008. ``On the Quantitative Analysis of", "on. Returns: A `Dataset` namedtuple. 
metadata: A dictionary with some", "name. split_type: str, type of split being loaded \"iid\" or", "the mnist with attributes dataset. Provide data loading utilities for", "'image': 'A [218 x 178 x 3] RGB image.', 'labels':", "index, and so on. Returns: A `Dataset` namedtuple. metadata: A", "import os import tensorflow as tf from tensorflow.contrib.slim.python.slim.data import dataset", "of split being loaded \"iid\" or \"comp\" dataset_dir: The base", "may obtain a copy of the License at # #", "\"iid\": raise ValueError(\"Only IID split available for CelebA.\") if num_classes_per_attribute", "import dataset from tensorflow.contrib.slim.python.slim.data import tfexample_decoder from datasets.celeba.image_decoder import ImageDecodeProcess", "if `split_name` is not a valid train/test split. \"\"\" if", "In Proceedings of the 25th International Conference on Machine Learning,", "loaded. items_to_handlers = { 'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64), 'labels':", "'attribute_label_map.json') file_pattern = os.path.join(dataset_dir, _FILE_PATTERN % split_name) tf.logging.info('Loading from %s", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "or \"comp\" dataset_dir: The base directory of the dataset sources.", "possible shapes, then then provide a value 3 in the", "Each location is a gaussian placed at four quadrants of", "with attributes dataset. Provide data loading utilities for an augmented", "may not use this file except in compliance with the", "then then provide a value 3 in the first index,", "3 in the first index, and so on. Returns: A", "three possible shapes, then then provide a value 3 in", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "this file except in compliance with the License. # You", "in the canvas). Each location is a gaussian placed at", "10 and -30 +-10. 
On a third of the occasions", "procedure described in the following paper: Salakhutdinov, Ruslan, and <NAME>.", "the occasions we dont orient the digit at all which", "import absolute_import from __future__ import division from __future__ import print_function", "to each attribute. For example, if the first attribute is", "if split_name not in _SPLITS_TO_SIZES: raise ValueError('split name %s was", "'A [218 x 178 x 3] RGB image.', 'labels': 'Attributes", "= tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata = { 'num_classes_per_attribute': num_classes_per_attribute, 'split_type': _SPLIT_TYPE,", "+- 0.1 and 0.9 +- 0.1 repsectively. 3. Orientation: we", "'iid' _DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES = {'train': 162770, 'val': 19867,", "split_name not in _SPLITS_TO_SIZES: raise ValueError('split name %s was not", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "{'train': 162770, 'val': 19867, 'test': 19962} _ITEMS_TO_DESCRIPTIONS = { 'image':", "and <NAME>. 2008. ``On the Quantitative Analysis of Deep Belief", "# # Licensed under the Apache License, Version 2.0 (the", "image_length=64, num_classes_per_attribute=None): \"\"\"Gets a dataset tuple with instructions for reading", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "the following paper: Salakhutdinov, Ruslan, and <NAME>. 2008. ``On the", "_FILE_PATTERN % split_name) tf.logging.info('Loading from %s file.' % (file_pattern)) keys_to_features", "decoder = tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata = { 'num_classes_per_attribute': num_classes_per_attribute, 'split_type':", "tensorflow.contrib.slim.python.slim.data import dataset from tensorflow.contrib.slim.python.slim.data import tfexample_decoder from datasets.celeba.image_decoder import", "images which # get loaded. 
items_to_handlers = { 'image': ImageDecodeProcess(shape=[218,", "values from gaussians at +30 +- 10 and -30 +-10.", "``On the Quantitative Analysis of Deep Belief Networks.'' In Proceedings", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "each attribute. For example, if the first attribute is \"shape\"", "str, type of split being loaded \"iid\" or \"comp\" dataset_dir:", "= _NUM_CLASSES_PER_ATTRIBUTE if dataset_dir is None or dataset_dir == '':", "need to be made to preprocess the images which #", "so on. Returns: A `Dataset` namedtuple. metadata: A dictionary with", "quadrants of the canvas. 2. Scale (We vary scale from", "split being loaded \"iid\" or \"comp\" dataset_dir: The base directory", "\"comp\" dataset_dir: The base directory of the dataset sources. num_classes_per_attribute:", "tf.FixedLenFeature((), tf.string, default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), } # TODO(vrama): See", "to +90 degrees, sampling actual values from gaussians at +30", "Networks.'' In Proceedings of the 25th International Conference on Machine", "then provide a value 3 in the first index, and", "tf.int64), } # TODO(vrama): See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For where", "first attribute is \"shape\" and there are three possible shapes,", "which # get loaded. items_to_handlers = { 'image': ImageDecodeProcess(shape=[218, 178,", "_SPLITS_TO_SIZES: raise ValueError('split name %s was not recognized.' % split_name)", "# limitations under the License. r\"\"\"Provides data for the mnist", "of the 25th International Conference on Machine Learning, 872-79. Author:", "Ruslan, and <NAME>. 2008. ``On the Quantitative Analysis of Deep", "items_to_handlers = { 'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'),", "permissions and # limitations under the License. 
r\"\"\"Provides data for", "} decoder = tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata = { 'num_classes_per_attribute': num_classes_per_attribute,", "not \"iid\": raise ValueError(\"Only IID split available for CelebA.\") if", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "Learning, 872-79. Author: vrama@ \"\"\" from __future__ import absolute_import from", "3. Orientation: we vary orientation from -90 to +90 degrees,", "= os.path.join(dataset_dir, _FILE_PATTERN % split_name) tf.logging.info('Loading from %s file.' %", "label_map_json, } return dataset.Dataset( data_sources=file_pattern, reader=tf.TFRecordReader, decoder=decoder, num_samples=_SPLITS_TO_SIZES[split_name], items_to_descriptions=_ITEMS_TO_DESCRIPTIONS), metadata", "'labels': tfexample_decoder.Tensor('image/labels'), } decoder = tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata = {", "3], image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'), } decoder = tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata", "for CelebA.\") if num_classes_per_attribute is None: num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE if", "or implied. # See the License for the specific language", "the canvas). Each location is a gaussian placed at four", "dataset_dir is None or dataset_dir == '': dataset_dir = _DATASET_DIR", "Provide data loading utilities for an augmented version of the", "means a rotation of 0 degrees. The original data after", "__future__ import print_function import os import tensorflow as tf from", "label map file. label_map_json = os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern = os.path.join(dataset_dir,", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "'%s-*' _SPLIT_TYPE = 'iid' _DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES = {'train':", "the dataset we just constructed. Raises: ValueError: if `split_name` is", "train/test split name. split_type: str, type of split being loaded", "# For where changes would need to be made to", "{ 'num_classes_per_attribute': num_classes_per_attribute, 'split_type': _SPLIT_TYPE, 'label_map_json': label_map_json, } return dataset.Dataset(", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "0.5 +- 0.1 and 0.9 +- 0.1 repsectively. 3. Orientation:", "Google Inc. # # Licensed under the Apache License, Version", "'val': 19867, 'test': 19962} _ITEMS_TO_DESCRIPTIONS = { 'image': 'A [218", "% split_name) tf.logging.info('Loading from %s file.' % (file_pattern)) keys_to_features =", "1.0), with two gaussians placed at 0.5 +- 0.1 and", "following paper: Salakhutdinov, Ruslan, and <NAME>. 2008. ``On the Quantitative", "first index, and so on. Returns: A `Dataset` namedtuple. metadata:", "# Only provides option to load the binarized version of", "The number of labels for the classfication problem corresponding to", "# Load attribute label map file. label_map_json = os.path.join(dataset_dir, 'attribute_label_map.json')", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "classfication problem corresponding to each attribute. For example, if the", "Quantitative Analysis of Deep Belief Networks.'' In Proceedings of the", "and # limitations under the License. r\"\"\"Provides data for the", "name %s was not recognized.' % split_name) if split_type is", "available for CelebA.\") if num_classes_per_attribute is None: num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE", "the first index, and so on. 
Returns: A `Dataset` namedtuple.", "= { 'image': 'A [218 x 178 x 3] RGB", "dictionary with some metadata about the dataset we just constructed.", "with two gaussians placed at 0.5 +- 0.1 and 0.9", "# Copyright 2017 Google Inc. # # Licensed under the", "# # Unless required by applicable law or agreed to", "from -90 to +90 degrees, sampling actual values from gaussians", "where changes would need to be made to preprocess the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "at 0.5 +- 0.1 and 0.9 +- 0.1 repsectively. 3.", "the canvas. 2. Scale (We vary scale from 0.4 to", "{ 'image': 'A [218 x 178 x 3] RGB image.',", "canvas and placed around one of four locations/regions in the", "Version 2.0 (the \"License\"); # you may not use this", "degrees. The original data after transformations is binarized as per", "if the first attribute is \"shape\" and there are three", "the digit at all which means a rotation of 0", "tensorflow.contrib.slim.python.slim.data import tfexample_decoder from datasets.celeba.image_decoder import ImageDecodeProcess # Only provides", "original data after transformations is binarized as per the procedure", "= {'train': 162770, 'val': 19867, 'test': 19962} _ITEMS_TO_DESCRIPTIONS = {", "ValueError: if `split_name` is not a valid train/test split. 
\"\"\"", "Analysis of Deep Belief Networks.'' In Proceedings of the 25th", "of Deep Belief Networks.'' In Proceedings of the 25th International", "See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For where changes would need to", "= { 'num_classes_per_attribute': num_classes_per_attribute, 'split_type': _SPLIT_TYPE, 'label_map_json': label_map_json, } return", "{ 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''), 'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'), 'image/labels':", "__future__ import absolute_import from __future__ import division from __future__ import", "implied. # See the License for the specific language governing", "872-79. Author: vrama@ \"\"\" from __future__ import absolute_import from __future__", "under the Apache License, Version 2.0 (the \"License\"); # you", "placed around one of four locations/regions in the canvas). Each", "vary scale from 0.4 to 1.0), with two gaussians placed", "x 178 x 3] RGB image.', 'labels': 'Attributes corresponding to", "following attributes: 1. Location (digits are translated on a canvas", "occasions we dont orient the digit at all which means", "Author: vrama@ \"\"\" from __future__ import absolute_import from __future__ import", "by applicable law or agreed to in writing, software #", "'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), } # TODO(vrama):", "shapes, then then provide a value 3 in the first", "the dataset sources. num_classes_per_attribute: The number of labels for the", "-30 +-10. On a third of the occasions we dont", "governing permissions and # limitations under the License. r\"\"\"Provides data", "Load attribute label map file. label_map_json = os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern", "all which means a rotation of 0 degrees. 
The original", "image.', 'labels': 'Attributes corresponding to the image.', } _NUM_CLASSES_PER_ATTRIBUTE =", "raise ValueError(\"Only IID split available for CelebA.\") if num_classes_per_attribute is", "_SPLIT_TYPE = 'iid' _DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES = {'train': 162770,", "was not recognized.' % split_name) if split_type is not \"iid\":", "binarized as per the procedure described in the following paper:", "we vary orientation from -90 to +90 degrees, sampling actual", "and there are three possible shapes, then then provide a", "split available for CelebA.\") if num_classes_per_attribute is None: num_classes_per_attribute =", "a gaussian placed at four quadrants of the canvas. 2.", "# get loaded. items_to_handlers = { 'image': ImageDecodeProcess(shape=[218, 178, 3],", "178, 3], image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'), } decoder = tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers)", "Scale (We vary scale from 0.4 to 1.0), with two", "ValueError(\"Only IID split available for CelebA.\") if num_classes_per_attribute is None:", "which means a rotation of 0 degrees. The original data", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "[218 x 178 x 3] RGB image.', 'labels': 'Attributes corresponding", "Unless required by applicable law or agreed to in writing,", "ImageDecodeProcess # Only provides option to load the binarized version", "and so on. Returns: A `Dataset` namedtuple. metadata: A dictionary", "'test': 19962} _ITEMS_TO_DESCRIPTIONS = { 'image': 'A [218 x 178", "-90 to +90 degrees, sampling actual values from gaussians at", "dataset tuple with instructions for reading 2D shapes data. 
Args:", "On a third of the occasions we dont orient the", "= 'iid' _DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES = {'train': 162770, 'val':", "gaussians placed at 0.5 +- 0.1 and 0.9 +- 0.1", "= '%s-*' _SPLIT_TYPE = 'iid' _DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES =", "\"\"\" if split_name not in _SPLITS_TO_SIZES: raise ValueError('split name %s", "MNIST dataset which contains the following attributes: 1. Location (digits", "is binarized as per the procedure described in the following", "the specific language governing permissions and # limitations under the", "2017 Google Inc. # # Licensed under the Apache License,", "Proceedings of the 25th International Conference on Machine Learning, 872-79.", "which contains the following attributes: 1. Location (digits are translated", "dataset sources. num_classes_per_attribute: The number of labels for the classfication", "applicable law or agreed to in writing, software # distributed", "data after transformations is binarized as per the procedure described", "\"\"\"Gets a dataset tuple with instructions for reading 2D shapes", "% split_name) if split_type is not \"iid\": raise ValueError(\"Only IID", "split_name) tf.logging.info('Loading from %s file.' % (file_pattern)) keys_to_features = {", "'num_classes_per_attribute': num_classes_per_attribute, 'split_type': _SPLIT_TYPE, 'label_map_json': label_map_json, } return dataset.Dataset( data_sources=file_pattern,", "for the mnist with attributes dataset. Provide data loading utilities", "Args: split_name: A train/test split name. 
split_type: str, type of", "datasets.celeba.image_decoder import ImageDecodeProcess # Only provides option to load the", "in writing, software # distributed under the License is distributed", "dataset from tensorflow.contrib.slim.python.slim.data import tfexample_decoder from datasets.celeba.image_decoder import ImageDecodeProcess #", "from __future__ import division from __future__ import print_function import os", "is not a valid train/test split. \"\"\" if split_name not", "digit at all which means a rotation of 0 degrees.", "if split_type is not \"iid\": raise ValueError(\"Only IID split available", "def get_split(split_name='train', split_type=\"iid\", dataset_dir=None, image_length=64, num_classes_per_attribute=None): \"\"\"Gets a dataset tuple", "the dataset. _FILE_PATTERN = '%s-*' _SPLIT_TYPE = 'iid' _DATASET_DIR =", "Raises: ValueError: if `split_name` is not a valid train/test split.", "we just constructed. Raises: ValueError: if `split_name` is not a", "namedtuple. metadata: A dictionary with some metadata about the dataset", "example, if the first attribute is \"shape\" and there are", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "degrees, sampling actual values from gaussians at +30 +- 10", "tfexample_decoder.Tensor('image/labels'), } decoder = tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) metadata = { 'num_classes_per_attribute':", "2D shapes data. Args: split_name: A train/test split name. split_type:", "License, Version 2.0 (the \"License\"); # you may not use", "import tensorflow as tf from tensorflow.contrib.slim.python.slim.data import dataset from tensorflow.contrib.slim.python.slim.data", "in the first index, and so on. Returns: A `Dataset`", "# You may obtain a copy of the License at", "paper: Salakhutdinov, Ruslan, and <NAME>. 2008. ``On the Quantitative Analysis", "dataset we just constructed. Raises: ValueError: if `split_name` is not", "and -30 +-10. 
On a third of the occasions we", "from __future__ import print_function import os import tensorflow as tf", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "dataset_dir=None, image_length=64, num_classes_per_attribute=None): \"\"\"Gets a dataset tuple with instructions for", "language governing permissions and # limitations under the License. r\"\"\"Provides", "to 1.0), with two gaussians placed at 0.5 +- 0.1", "if num_classes_per_attribute is None: num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE if dataset_dir is", "dataset_dir: The base directory of the dataset sources. num_classes_per_attribute: The", "around one of four locations/regions in the canvas). Each location", "tf.string, default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), } # TODO(vrama): See #", "provides option to load the binarized version of the dataset.", "ValueError('split name %s was not recognized.' % split_name) if split_type", "`Dataset` namedtuple. metadata: A dictionary with some metadata about the", "some metadata about the dataset we just constructed. Raises: ValueError:", "type of split being loaded \"iid\" or \"comp\" dataset_dir: The", "the License for the specific language governing permissions and #", "Apache License, Version 2.0 (the \"License\"); # you may not", "four locations/regions in the canvas). Each location is a gaussian", "%s file.' % (file_pattern)) keys_to_features = { 'image/encoded': tf.FixedLenFeature((), tf.string,", "either express or implied. # See the License for the", "19867, 'test': 19962} _ITEMS_TO_DESCRIPTIONS = { 'image': 'A [218 x", "described in the following paper: Salakhutdinov, Ruslan, and <NAME>. 2008.", "directory of the dataset sources. num_classes_per_attribute: The number of labels", "# # Copyright 2017 Google Inc. 
# # Licensed under", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "number of labels for the classfication problem corresponding to each", "dataset. _FILE_PATTERN = '%s-*' _SPLIT_TYPE = 'iid' _DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig'", "162770, 'val': 19867, 'test': 19962} _ITEMS_TO_DESCRIPTIONS = { 'image': 'A", "provide a value 3 in the first index, and so", "dont orient the digit at all which means a rotation", "with instructions for reading 2D shapes data. Args: split_name: A", "data loading utilities for an augmented version of the MNIST", "Machine Learning, 872-79. Author: vrama@ \"\"\" from __future__ import absolute_import", "split_type: str, type of split being loaded \"iid\" or \"comp\"", "if dataset_dir is None or dataset_dir == '': dataset_dir =", "Location (digits are translated on a canvas and placed around", "metadata = { 'num_classes_per_attribute': num_classes_per_attribute, 'split_type': _SPLIT_TYPE, 'label_map_json': label_map_json, }", "split name. split_type: str, type of split being loaded \"iid\"", "'/srv/share/datasets/celeba_for_tf_ig' _SPLITS_TO_SIZES = {'train': 162770, 'val': 19867, 'test': 19962} _ITEMS_TO_DESCRIPTIONS", "loading utilities for an augmented version of the MNIST dataset", "the procedure described in the following paper: Salakhutdinov, Ruslan, and", "TODO(vrama): See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For where changes would need", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the binarized version of the dataset. 
_FILE_PATTERN = '%s-*' _SPLIT_TYPE", "loaded \"iid\" or \"comp\" dataset_dir: The base directory of the", "= tuple([2]*18) def get_split(split_name='train', split_type=\"iid\", dataset_dir=None, image_length=64, num_classes_per_attribute=None): \"\"\"Gets a", "Belief Networks.'' In Proceedings of the 25th International Conference on", "(digits are translated on a canvas and placed around one", "split. \"\"\" if split_name not in _SPLITS_TO_SIZES: raise ValueError('split name", "The original data after transformations is binarized as per the", "rotation of 0 degrees. The original data after transformations is", "absolute_import from __future__ import division from __future__ import print_function import", "num_classes_per_attribute=None): \"\"\"Gets a dataset tuple with instructions for reading 2D", "to load the binarized version of the dataset. _FILE_PATTERN =", "of the dataset sources. num_classes_per_attribute: The number of labels for", "# TODO(vrama): See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For where changes would", "os import tensorflow as tf from tensorflow.contrib.slim.python.slim.data import dataset from", "division from __future__ import print_function import os import tensorflow as", "\"License\"); # you may not use this file except in", "tf.string, default_value=''), 'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'), 'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64), }", "International Conference on Machine Learning, 872-79. Author: vrama@ \"\"\" from", "recognized.' % split_name) if split_type is not \"iid\": raise ValueError(\"Only", "corresponding to each attribute. 
For example, if the first attribute", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "dataset_dir == '': dataset_dir = _DATASET_DIR # Load attribute label", "translated on a canvas and placed around one of four", "# distributed under the License is distributed on an \"AS", "RGB image.', 'labels': 'Attributes corresponding to the image.', } _NUM_CLASSES_PER_ATTRIBUTE", "a rotation of 0 degrees. The original data after transformations", "# Unless required by applicable law or agreed to in", "} # TODO(vrama): See # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270 # For where changes", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "shapes data. Args: split_name: A train/test split name. split_type: str,", "metadata: A dictionary with some metadata about the dataset we", "raise ValueError('split name %s was not recognized.' % split_name) if", "== '': dataset_dir = _DATASET_DIR # Load attribute label map", "after transformations is binarized as per the procedure described in", "%s was not recognized.' % split_name) if split_type is not", "You may obtain a copy of the License at #", "mnist with attributes dataset. Provide data loading utilities for an", "= { 'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64), 'labels': tfexample_decoder.Tensor('image/labels'), }", "to be made to preprocess the images which # get", "one of four locations/regions in the canvas). Each location is", "attribute. For example, if the first attribute is \"shape\" and", "repsectively. 3. 
Orientation: we vary orientation from -90 to +90", "178 x 3] RGB image.', 'labels': 'Attributes corresponding to the", "scale from 0.4 to 1.0), with two gaussians placed at", "= os.path.join(dataset_dir, 'attribute_label_map.json') file_pattern = os.path.join(dataset_dir, _FILE_PATTERN % split_name) tf.logging.info('Loading", "the Apache License, Version 2.0 (the \"License\"); # you may", "utilities for an augmented version of the MNIST dataset which", "of four locations/regions in the canvas). Each location is a", "'labels': 'Attributes corresponding to the image.', } _NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18)", "canvas). Each location is a gaussian placed at four quadrants", "corresponding to the image.', } _NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18) def get_split(split_name='train'," ]
[ "os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path, frames, fps=60) optimize(write_path) print(f\"...Record episode to {write_path}...\")", "key, value, step) self.writer.add_scalar(\"all/\" + key, value, step) if \"score\"", "len(frames) > 0: score = scalar_dict[\"score\"] write_path = os.path.join(self.path, f\"{step:010d}_{score}.gif\")", "env, id, experiment=None): self.id = id now = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path", "scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" + key, value, step) self.writer.add_scalar(\"all/\" + key, value,", "step) self.writer.add_scalar(\"all/\" + key, value, step) if \"score\" in key:", "SummaryWriter(self.path) self.stamp = time.time() def write(self, scalar_dict, frames, step): for", "value, step) self.writer.add_scalar(\"all/\" + key, value, step) if \"score\" in", "self.stamp = time.time() def write(self, scalar_dict, frames, step): for key,", "> 0: score = scalar_dict[\"score\"] write_path = os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path,", "self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta) if len(frames) > 0:", "int(time.time() - self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta) if", "else f\"./logs/{env}/{id}/{now}/\" ) self.writer = SummaryWriter(self.path) self.stamp = time.time() def", "import datetime, time import imageio from pygifsicle import optimize from", "= datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path = ( f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment else f\"./logs/{env}/{id}/{now}/\"", "self.writer = SummaryWriter(self.path) self.stamp = time.time() def write(self, scalar_dict, frames,", "if experiment else f\"./logs/{env}/{id}/{now}/\" ) self.writer = SummaryWriter(self.path) self.stamp =", 
"frames, step): for key, value in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" + key,", "self.writer.add_scalar(f\"{self.id}/\" + key, value, step) self.writer.add_scalar(\"all/\" + key, value, step)", "score = scalar_dict[\"score\"] write_path = os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path, frames, fps=60)", "pygifsicle import optimize from torch.utils.tensorboard import SummaryWriter class LogManager: def", "optimize from torch.utils.tensorboard import SummaryWriter class LogManager: def __init__(self, env,", "value in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" + key, value, step) self.writer.add_scalar(\"all/\" +", "class LogManager: def __init__(self, env, id, experiment=None): self.id = id", "in key: time_delta = int(time.time() - self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta)", "= time.time() def write(self, scalar_dict, frames, step): for key, value", ") self.writer = SummaryWriter(self.path) self.stamp = time.time() def write(self, scalar_dict,", "value, time_delta) if len(frames) > 0: score = scalar_dict[\"score\"] write_path", "self.writer.add_scalar(\"all/\" + key, value, step) if \"score\" in key: time_delta", "def write(self, scalar_dict, frames, step): for key, value in scalar_dict.items():", "f\"./logs/{env}/{id}/{now}/\" ) self.writer = SummaryWriter(self.path) self.stamp = time.time() def write(self,", "from pygifsicle import optimize from torch.utils.tensorboard import SummaryWriter class LogManager:", "datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path = ( f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment else f\"./logs/{env}/{id}/{now}/\" )", "f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment else f\"./logs/{env}/{id}/{now}/\" ) self.writer = SummaryWriter(self.path) self.stamp", "+ key, value, step) if \"score\" in key: time_delta =", "time.time() def write(self, scalar_dict, frames, step): for key, value in", 
"import os import datetime, time import imageio from pygifsicle import", "key, value, step) if \"score\" in key: time_delta = int(time.time()", "+ key, value, step) self.writer.add_scalar(\"all/\" + key, value, step) if", "import imageio from pygifsicle import optimize from torch.utils.tensorboard import SummaryWriter", "time_delta = int(time.time() - self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value,", "def __init__(self, env, id, experiment=None): self.id = id now =", "os import datetime, time import imageio from pygifsicle import optimize", "in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" + key, value, step) self.writer.add_scalar(\"all/\" + key,", "= ( f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment else f\"./logs/{env}/{id}/{now}/\" ) self.writer =", "<filename>jorldy/manager/log_manager.py import os import datetime, time import imageio from pygifsicle", "LogManager: def __init__(self, env, id, experiment=None): self.id = id now", "now = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path = ( f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment else", "( f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment else f\"./logs/{env}/{id}/{now}/\" ) self.writer = SummaryWriter(self.path)", "- self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta) if len(frames)", "if \"score\" in key: time_delta = int(time.time() - self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\",", "id, experiment=None): self.id = id now = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path =", "step): for key, value in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" + key, value,", "= os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path, frames, fps=60) optimize(write_path) print(f\"...Record episode to", 
"value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta) if len(frames) > 0: score", "datetime, time import imageio from pygifsicle import optimize from torch.utils.tensorboard", "self.id = id now = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path = ( f\"./logs/{experiment}/{env}/{id}/{now}/\"", "experiment=None): self.id = id now = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path = (", "time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta) if len(frames) > 0: score =", "SummaryWriter class LogManager: def __init__(self, env, id, experiment=None): self.id =", "= int(time.time() - self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta)", "from torch.utils.tensorboard import SummaryWriter class LogManager: def __init__(self, env, id,", "step) if \"score\" in key: time_delta = int(time.time() - self.stamp)", "import optimize from torch.utils.tensorboard import SummaryWriter class LogManager: def __init__(self,", "0: score = scalar_dict[\"score\"] write_path = os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path, frames,", "key: time_delta = int(time.time() - self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\",", "time import imageio from pygifsicle import optimize from torch.utils.tensorboard import", "id now = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path = ( f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment", "scalar_dict, frames, step): for key, value in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" +", "imageio from pygifsicle import optimize from torch.utils.tensorboard import SummaryWriter class", "write(self, scalar_dict, frames, step): for key, value in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\"", "= id now = 
datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\") self.path = ( f\"./logs/{experiment}/{env}/{id}/{now}/\" if", "= SummaryWriter(self.path) self.stamp = time.time() def write(self, scalar_dict, frames, step):", "import SummaryWriter class LogManager: def __init__(self, env, id, experiment=None): self.id", "experiment else f\"./logs/{env}/{id}/{now}/\" ) self.writer = SummaryWriter(self.path) self.stamp = time.time()", "= scalar_dict[\"score\"] write_path = os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path, frames, fps=60) optimize(write_path)", "write_path = os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path, frames, fps=60) optimize(write_path) print(f\"...Record episode", "self.path = ( f\"./logs/{experiment}/{env}/{id}/{now}/\" if experiment else f\"./logs/{env}/{id}/{now}/\" ) self.writer", "__init__(self, env, id, experiment=None): self.id = id now = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S%f\")", "\"score\" in key: time_delta = int(time.time() - self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value,", "value, step) if \"score\" in key: time_delta = int(time.time() -", "torch.utils.tensorboard import SummaryWriter class LogManager: def __init__(self, env, id, experiment=None):", "self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta) if len(frames) > 0: score = scalar_dict[\"score\"]", "key, value in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" + key, value, step) self.writer.add_scalar(\"all/\"", "time_delta) if len(frames) > 0: score = scalar_dict[\"score\"] write_path =", "for key, value in scalar_dict.items(): self.writer.add_scalar(f\"{self.id}/\" + key, value, step)", "if len(frames) > 0: score = scalar_dict[\"score\"] write_path = os.path.join(self.path,", "self.stamp) self.writer.add_scalar(f\"{self.id}/{key}_per_time\", value, time_delta) self.writer.add_scalar(f\"all/{key}_per_time\", value, time_delta) if len(frames) >", 
"scalar_dict[\"score\"] write_path = os.path.join(self.path, f\"{step:010d}_{score}.gif\") imageio.mimwrite(write_path, frames, fps=60) optimize(write_path) print(f\"...Record" ]
[ "driver UNI interface configuraton ''' import GlobalModule from EmCommonLog import", "%(vip_ip_address)s\" self._append_add_command(comm_txt, parame) if self.is_active: comm_txt = \"standby %(hsrp_id)s priority", "vip_ip_address self.hsrp_id = hsrp_id self.mtu = mtu self.is_active = is_active", "mtu self.is_active = is_active @decorater_log def output_add_command(self): ''' Command line", "type) ''' tmp_param = super(UNIInterface, self)._get_param() tmp_param.update( { \"vlan_id\": self.vlan_id,", "version 2\") comm_txt = \"standby %(hsrp_id)s ip %(vip_ip_address)s\" self._append_add_command(comm_txt, parame)", "\"vip_ip_address\": self.vip_ip_address, \"hsrp_id\": self.hsrp_id, \"mtu\": self.mtu, } ) return tmp_param", "tmp_param = super(UNIInterface, self)._get_param() tmp_param.update( { \"vlan_id\": self.vlan_id, \"ip_address\": self.ip_address,", "Module for ASR driver UNI interface configuraton ''' import GlobalModule", "parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni if command = %s\" % (self._tmp_add_command,))", "utf-8 -*- # Copyright(c) 2019 Nippon Telegraph and Telephone Corporation", "def __init__(self, vrf_name=None, if_name=None, vlan_id=None, ip_address=None, subnet_mask=None, vip_ip_address=None, hsrp_id=None, mtu=None,", "self.vlan_id, \"ip_address\": self.ip_address, \"subnet_mask\": self.subnet_mask, \"vip_ip_address\": self.vip_ip_address, \"hsrp_id\": self.hsrp_id, \"mtu\":", "self._get_param() self._interface_common_start() self._append_add_command(\"standby version 2\") comm_txt = \"standby %(hsrp_id)s ip", "self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni if command = %s\" % (self._tmp_add_command,)) return", "vrf_name=None, if_name=None, vlan_id=None, ip_address=None, subnet_mask=None, vip_ip_address=None, hsrp_id=None, mtu=None, is_active=True): '''", "= mtu self.is_active = is_active @decorater_log def output_add_command(self): ''' Command", "%s\" % (self._tmp_add_command,)) return 
self._tmp_add_command @decorater_log def _get_param(self): ''' Parameter", "return self._tmp_add_command @decorater_log def _get_param(self): ''' Parameter is acquired from", "is output. ''' parame = self._get_param() self._interface_common_start() self._append_add_command(\"standby version 2\")", "= ip_address self.subnet_mask = subnet_mask self.vip_ip_address = vip_ip_address self.hsrp_id =", "-*- coding: utf-8 -*- # Copyright(c) 2019 Nippon Telegraph and", "_get_param(self): ''' Parameter is acquired from attribute.(dict type) ''' tmp_param", "= super(UNIInterface, self)._get_param() tmp_param.update( { \"vlan_id\": self.vlan_id, \"ip_address\": self.ip_address, \"subnet_mask\":", "@decorater_log def output_add_command(self): ''' Command line to add configuration is", "ASRDriverParts/UNIInterface.py ''' Parts Module for ASR driver UNI interface configuraton", "= \"ip mtu %(mtu)s\" self._append_add_command(comm_txt, parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni if", "self.mtu = mtu self.is_active = is_active @decorater_log def output_add_command(self): '''", "\"subnet_mask\": self.subnet_mask, \"vip_ip_address\": self.vip_ip_address, \"hsrp_id\": self.hsrp_id, \"mtu\": self.mtu, } )", "subnet_mask self.vip_ip_address = vip_ip_address self.hsrp_id = hsrp_id self.mtu = mtu", "EmCommonLog import decorater_log from ASRDriverParts.InterfaceBase import InterfaceBase class UNIInterface(InterfaceBase): '''", "\"standby %(hsrp_id)s ip %(vip_ip_address)s\" self._append_add_command(comm_txt, parame) if self.is_active: comm_txt =", "= self._get_param() self._interface_common_start() self._append_add_command(\"standby version 2\") comm_txt = \"standby %(hsrp_id)s", "= \"standby %(hsrp_id)s ip %(vip_ip_address)s\" self._append_add_command(comm_txt, parame) if self.is_active: comm_txt", "GlobalModule from EmCommonLog import decorater_log from ASRDriverParts.InterfaceBase import InterfaceBase class", "''' parame = self._get_param() 
self._interface_common_start() self._append_add_command(\"standby version 2\") comm_txt =", "acquired from attribute.(dict type) ''' tmp_param = super(UNIInterface, self)._get_param() tmp_param.update(", "''' super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id = vlan_id self.ip_address = ip_address", "''' Costructor ''' super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id = vlan_id self.ip_address", "self)._get_param() tmp_param.update( { \"vlan_id\": self.vlan_id, \"ip_address\": self.ip_address, \"subnet_mask\": self.subnet_mask, \"vip_ip_address\":", "configuraton ''' import GlobalModule from EmCommonLog import decorater_log from ASRDriverParts.InterfaceBase", "configuraton ''' @decorater_log def __init__(self, vrf_name=None, if_name=None, vlan_id=None, ip_address=None, subnet_mask=None,", "self._append_add_command(comm_txt, parame) if self.is_active: comm_txt = \"standby %(hsrp_id)s priority 105\"", "subnet_mask=None, vip_ip_address=None, hsrp_id=None, mtu=None, is_active=True): ''' Costructor ''' super(UNIInterface, self).__init__(vrf_name=vrf_name,", "UNIInterface(InterfaceBase): ''' Parts class for ASR driver UNI interface configuraton", "%(hsrp_id)s ip %(vip_ip_address)s\" self._append_add_command(comm_txt, parame) if self.is_active: comm_txt = \"standby", "comm_txt = \"standby %(hsrp_id)s preempt\" self._append_add_command(comm_txt, parame) comm_txt = \"ip", "\"vlan_id\": self.vlan_id, \"ip_address\": self.ip_address, \"subnet_mask\": self.subnet_mask, \"vip_ip_address\": self.vip_ip_address, \"hsrp_id\": self.hsrp_id,", "InterfaceBase class UNIInterface(InterfaceBase): ''' Parts class for ASR driver UNI", "from ASRDriverParts.InterfaceBase import InterfaceBase class UNIInterface(InterfaceBase): ''' Parts class for", "command = %s\" % (self._tmp_add_command,)) return self._tmp_add_command @decorater_log def _get_param(self):", "ASRDriverParts.InterfaceBase import InterfaceBase class 
UNIInterface(InterfaceBase): ''' Parts class for ASR", "decorater_log from ASRDriverParts.InterfaceBase import InterfaceBase class UNIInterface(InterfaceBase): ''' Parts class", "coding: utf-8 -*- # Copyright(c) 2019 Nippon Telegraph and Telephone", "python # -*- coding: utf-8 -*- # Copyright(c) 2019 Nippon", "comm_txt = \"standby %(hsrp_id)s priority 105\" self._append_add_command(comm_txt, parame) comm_txt =", "tmp_param.update( { \"vlan_id\": self.vlan_id, \"ip_address\": self.ip_address, \"subnet_mask\": self.subnet_mask, \"vip_ip_address\": self.vip_ip_address,", "for ASR driver UNI interface configuraton ''' @decorater_log def __init__(self,", "Costructor ''' super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id = vlan_id self.ip_address =", "comm_txt = \"standby %(hsrp_id)s ip %(vip_ip_address)s\" self._append_add_command(comm_txt, parame) if self.is_active:", "self.ip_address, \"subnet_mask\": self.subnet_mask, \"vip_ip_address\": self.vip_ip_address, \"hsrp_id\": self.hsrp_id, \"mtu\": self.mtu, }", "if command = %s\" % (self._tmp_add_command,)) return self._tmp_add_command @decorater_log def", "output. ''' parame = self._get_param() self._interface_common_start() self._append_add_command(\"standby version 2\") comm_txt", "if self.is_active: comm_txt = \"standby %(hsrp_id)s priority 105\" self._append_add_command(comm_txt, parame)", "Telegraph and Telephone Corporation # Filename: ASRDriverParts/UNIInterface.py ''' Parts Module", "import decorater_log from ASRDriverParts.InterfaceBase import InterfaceBase class UNIInterface(InterfaceBase): ''' Parts", "parame = self._get_param() self._interface_common_start() self._append_add_command(\"standby version 2\") comm_txt = \"standby", "Command line to add configuration is output. ''' parame =", "Telephone Corporation # Filename: ASRDriverParts/UNIInterface.py ''' Parts Module for ASR", "''' Command line to add configuration is output. 
''' parame", "super(UNIInterface, self)._get_param() tmp_param.update( { \"vlan_id\": self.vlan_id, \"ip_address\": self.ip_address, \"subnet_mask\": self.subnet_mask,", "line to add configuration is output. ''' parame = self._get_param()", "% (self._tmp_add_command,)) return self._tmp_add_command @decorater_log def _get_param(self): ''' Parameter is", "105\" self._append_add_command(comm_txt, parame) comm_txt = \"standby %(hsrp_id)s preempt\" self._append_add_command(comm_txt, parame)", "import InterfaceBase class UNIInterface(InterfaceBase): ''' Parts class for ASR driver", "__init__(self, vrf_name=None, if_name=None, vlan_id=None, ip_address=None, subnet_mask=None, vip_ip_address=None, hsrp_id=None, mtu=None, is_active=True):", "hsrp_id=None, mtu=None, is_active=True): ''' Costructor ''' super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id", "class for ASR driver UNI interface configuraton ''' @decorater_log def", "parame) if self.is_active: comm_txt = \"standby %(hsrp_id)s priority 105\" self._append_add_command(comm_txt,", "if_name=if_name) self.vlan_id = vlan_id self.ip_address = ip_address self.subnet_mask = subnet_mask", "self.ip_address = ip_address self.subnet_mask = subnet_mask self.vip_ip_address = vip_ip_address self.hsrp_id", "@decorater_log def __init__(self, vrf_name=None, if_name=None, vlan_id=None, ip_address=None, subnet_mask=None, vip_ip_address=None, hsrp_id=None,", "Filename: ASRDriverParts/UNIInterface.py ''' Parts Module for ASR driver UNI interface", "self._append_add_command(comm_txt, parame) comm_txt = \"standby %(hsrp_id)s preempt\" self._append_add_command(comm_txt, parame) comm_txt", "ip_address=None, subnet_mask=None, vip_ip_address=None, hsrp_id=None, mtu=None, is_active=True): ''' Costructor ''' super(UNIInterface,", "parame) comm_txt = \"ip mtu %(mtu)s\" self._append_add_command(comm_txt, parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug(", "self._tmp_add_command @decorater_log def 
_get_param(self): ''' Parameter is acquired from attribute.(dict", "self._append_add_command(comm_txt, parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni if command = %s\" %", "\"uni if command = %s\" % (self._tmp_add_command,)) return self._tmp_add_command @decorater_log", "self.vip_ip_address = vip_ip_address self.hsrp_id = hsrp_id self.mtu = mtu self.is_active", "mtu %(mtu)s\" self._append_add_command(comm_txt, parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni if command =", "self.subnet_mask, \"vip_ip_address\": self.vip_ip_address, \"hsrp_id\": self.hsrp_id, \"mtu\": self.mtu, } ) return", "-*- # Copyright(c) 2019 Nippon Telegraph and Telephone Corporation #", "''' import GlobalModule from EmCommonLog import decorater_log from ASRDriverParts.InterfaceBase import", "ASR driver UNI interface configuraton ''' @decorater_log def __init__(self, vrf_name=None,", "priority 105\" self._append_add_command(comm_txt, parame) comm_txt = \"standby %(hsrp_id)s preempt\" self._append_add_command(comm_txt,", "# -*- coding: utf-8 -*- # Copyright(c) 2019 Nippon Telegraph", "UNI interface configuraton ''' @decorater_log def __init__(self, vrf_name=None, if_name=None, vlan_id=None,", "(self._tmp_add_command,)) return self._tmp_add_command @decorater_log def _get_param(self): ''' Parameter is acquired", "Parts class for ASR driver UNI interface configuraton ''' @decorater_log", "\"standby %(hsrp_id)s priority 105\" self._append_add_command(comm_txt, parame) comm_txt = \"standby %(hsrp_id)s", "to add configuration is output. ''' parame = self._get_param() self._interface_common_start()", "Nippon Telegraph and Telephone Corporation # Filename: ASRDriverParts/UNIInterface.py ''' Parts", "# Filename: ASRDriverParts/UNIInterface.py ''' Parts Module for ASR driver UNI", "configuration is output. 
''' parame = self._get_param() self._interface_common_start() self._append_add_command(\"standby version", "= vip_ip_address self.hsrp_id = hsrp_id self.mtu = mtu self.is_active =", "import GlobalModule from EmCommonLog import decorater_log from ASRDriverParts.InterfaceBase import InterfaceBase", "''' Parameter is acquired from attribute.(dict type) ''' tmp_param =", "self.hsrp_id = hsrp_id self.mtu = mtu self.is_active = is_active @decorater_log", "Parameter is acquired from attribute.(dict type) ''' tmp_param = super(UNIInterface,", "driver UNI interface configuraton ''' @decorater_log def __init__(self, vrf_name=None, if_name=None,", "= \"standby %(hsrp_id)s priority 105\" self._append_add_command(comm_txt, parame) comm_txt = \"standby", "Copyright(c) 2019 Nippon Telegraph and Telephone Corporation # Filename: ASRDriverParts/UNIInterface.py", "''' Parts class for ASR driver UNI interface configuraton '''", "''' @decorater_log def __init__(self, vrf_name=None, if_name=None, vlan_id=None, ip_address=None, subnet_mask=None, vip_ip_address=None,", "class UNIInterface(InterfaceBase): ''' Parts class for ASR driver UNI interface", "GlobalModule.EM_LOGGER.debug( \"uni if command = %s\" % (self._tmp_add_command,)) return self._tmp_add_command", "def _get_param(self): ''' Parameter is acquired from attribute.(dict type) '''", "vip_ip_address=None, hsrp_id=None, mtu=None, is_active=True): ''' Costructor ''' super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name)", "= vlan_id self.ip_address = ip_address self.subnet_mask = subnet_mask self.vip_ip_address =", "output_add_command(self): ''' Command line to add configuration is output. 
'''", "= subnet_mask self.vip_ip_address = vip_ip_address self.hsrp_id = hsrp_id self.mtu =", "%(hsrp_id)s priority 105\" self._append_add_command(comm_txt, parame) comm_txt = \"standby %(hsrp_id)s preempt\"", "2019 Nippon Telegraph and Telephone Corporation # Filename: ASRDriverParts/UNIInterface.py '''", "attribute.(dict type) ''' tmp_param = super(UNIInterface, self)._get_param() tmp_param.update( { \"vlan_id\":", "for ASR driver UNI interface configuraton ''' import GlobalModule from", "self.is_active: comm_txt = \"standby %(hsrp_id)s priority 105\" self._append_add_command(comm_txt, parame) comm_txt", "self.is_active = is_active @decorater_log def output_add_command(self): ''' Command line to", "Parts Module for ASR driver UNI interface configuraton ''' import", "''' Parts Module for ASR driver UNI interface configuraton '''", "add configuration is output. ''' parame = self._get_param() self._interface_common_start() self._append_add_command(\"standby", "= \"standby %(hsrp_id)s preempt\" self._append_add_command(comm_txt, parame) comm_txt = \"ip mtu", "ip_address self.subnet_mask = subnet_mask self.vip_ip_address = vip_ip_address self.hsrp_id = hsrp_id", "''' tmp_param = super(UNIInterface, self)._get_param() tmp_param.update( { \"vlan_id\": self.vlan_id, \"ip_address\":", "is_active=True): ''' Costructor ''' super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id = vlan_id", "ASR driver UNI interface configuraton ''' import GlobalModule from EmCommonLog", "self._interface_common_start() self._append_add_command(\"standby version 2\") comm_txt = \"standby %(hsrp_id)s ip %(vip_ip_address)s\"", "mtu=None, is_active=True): ''' Costructor ''' super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id =", "UNI interface configuraton ''' import GlobalModule from EmCommonLog import decorater_log", "from attribute.(dict type) ''' tmp_param = super(UNIInterface, self)._get_param() tmp_param.update( {", "interface 
configuraton ''' @decorater_log def __init__(self, vrf_name=None, if_name=None, vlan_id=None, ip_address=None,", "interface configuraton ''' import GlobalModule from EmCommonLog import decorater_log from", "self._append_add_command(\"standby version 2\") comm_txt = \"standby %(hsrp_id)s ip %(vip_ip_address)s\" self._append_add_command(comm_txt,", "\"ip mtu %(mtu)s\" self._append_add_command(comm_txt, parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni if command", "is acquired from attribute.(dict type) ''' tmp_param = super(UNIInterface, self)._get_param()", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright(c) 2019", "and Telephone Corporation # Filename: ASRDriverParts/UNIInterface.py ''' Parts Module for", "2\") comm_txt = \"standby %(hsrp_id)s ip %(vip_ip_address)s\" self._append_add_command(comm_txt, parame) if", "is_active @decorater_log def output_add_command(self): ''' Command line to add configuration", "\"ip_address\": self.ip_address, \"subnet_mask\": self.subnet_mask, \"vip_ip_address\": self.vip_ip_address, \"hsrp_id\": self.hsrp_id, \"mtu\": self.mtu,", "\"standby %(hsrp_id)s preempt\" self._append_add_command(comm_txt, parame) comm_txt = \"ip mtu %(mtu)s\"", "self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id = vlan_id self.ip_address = ip_address self.subnet_mask =", "# Copyright(c) 2019 Nippon Telegraph and Telephone Corporation # Filename:", "ip %(vip_ip_address)s\" self._append_add_command(comm_txt, parame) if self.is_active: comm_txt = \"standby %(hsrp_id)s", "= is_active @decorater_log def output_add_command(self): ''' Command line to add", "comm_txt = \"ip mtu %(mtu)s\" self._append_add_command(comm_txt, parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni", "%(mtu)s\" self._append_add_command(comm_txt, parame) self._interface_common_end() GlobalModule.EM_LOGGER.debug( \"uni if command = %s\"", "self._append_add_command(comm_txt, parame) comm_txt = \"ip mtu %(mtu)s\" 
self._append_add_command(comm_txt, parame) self._interface_common_end()", "@decorater_log def _get_param(self): ''' Parameter is acquired from attribute.(dict type)", "hsrp_id self.mtu = mtu self.is_active = is_active @decorater_log def output_add_command(self):", "%(hsrp_id)s preempt\" self._append_add_command(comm_txt, parame) comm_txt = \"ip mtu %(mtu)s\" self._append_add_command(comm_txt,", "parame) comm_txt = \"standby %(hsrp_id)s preempt\" self._append_add_command(comm_txt, parame) comm_txt =", "= %s\" % (self._tmp_add_command,)) return self._tmp_add_command @decorater_log def _get_param(self): '''", "Corporation # Filename: ASRDriverParts/UNIInterface.py ''' Parts Module for ASR driver", "if_name=None, vlan_id=None, ip_address=None, subnet_mask=None, vip_ip_address=None, hsrp_id=None, mtu=None, is_active=True): ''' Costructor", "super(UNIInterface, self).__init__(vrf_name=vrf_name, if_name=if_name) self.vlan_id = vlan_id self.ip_address = ip_address self.subnet_mask", "= hsrp_id self.mtu = mtu self.is_active = is_active @decorater_log def", "def output_add_command(self): ''' Command line to add configuration is output.", "from EmCommonLog import decorater_log from ASRDriverParts.InterfaceBase import InterfaceBase class UNIInterface(InterfaceBase):", "{ \"vlan_id\": self.vlan_id, \"ip_address\": self.ip_address, \"subnet_mask\": self.subnet_mask, \"vip_ip_address\": self.vip_ip_address, \"hsrp_id\":", "self.vlan_id = vlan_id self.ip_address = ip_address self.subnet_mask = subnet_mask self.vip_ip_address", "self.subnet_mask = subnet_mask self.vip_ip_address = vip_ip_address self.hsrp_id = hsrp_id self.mtu", "vlan_id self.ip_address = ip_address self.subnet_mask = subnet_mask self.vip_ip_address = vip_ip_address", "vlan_id=None, ip_address=None, subnet_mask=None, vip_ip_address=None, hsrp_id=None, mtu=None, is_active=True): ''' Costructor '''", "preempt\" self._append_add_command(comm_txt, parame) comm_txt = \"ip mtu %(mtu)s\" self._append_add_command(comm_txt, 
parame)" ]
[ "images requires Pillow installation, please install it before usage\" )", "Corporation Licensed under the Apache License, Version 2.0 (the \"License\");", "if file not found content_errors.append('{}: does not exist'.format(self.converted_images_dir / identifier))", "not None and index % progress_interval == 0: progress_callback(index /", "from PIL import Image except ImportError: Image = None class", "/ identifier)) annotations.append(ClassificationAnnotation(identifier, label)) if check_images: if not check_file_existence(self.converted_images_dir /", "in enumerate(annotation_table): identifier = '{}.png'.format(index) label = labels_to_id.get(annotation['label'], int(annotation['label'])) if", "dataset_meta['label_map'] = label_map or {str(i): i for i in range(10)}", "to errors list if file not found content_errors.append('{}: does not", "= '{}x{}' for x in range(28): for y in range(28):", "obtaining the necessary parameters for converting from the command line", "look up __provider__ = 'mnist_csv' annotation_types = (ClassificationAnnotation, ) @classmethod", "= self.get_value_from_config('annotation_file') self.converted_images_dir = self.get_value_from_config('converted_images_dir') self.convert_images = self.get_value_from_config('convert_images') if self.convert_images", "'labels' in dataset_meta: label_map = dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] = label_map or", "under the License. 
\"\"\" import numpy as np from ..config", "None if check_content: self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent / 'converted_images'", "requires Pillow installation, please install it before usage\" ) self.dataset_meta", "check_content: if not self.converted_images_dir.exists(): content_errors = ['{}: does not exist'.format(self.converted_images_dir)]", "annotation in enumerate(annotation_table): identifier = '{}.png'.format(index) label = labels_to_id.get(annotation['label'], int(annotation['label']))", "= 'mnist_csv' annotation_types = (ClassificationAnnotation, ) @classmethod def parameters(cls): configuration_parameters", "name for this converter # this name will be used", "Apache License, Version 2.0 (the \"License\"); you may not use", "it before usage\" ) self.dataset_meta = self.get_value_from_config('dataset_meta_file') def convert(self, check_content=False,", "import PathField, BoolField from ..representation import ClassificationAnnotation from ..utils import", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "not self.convert_images meta = self.generate_meta() labels_to_id = meta['label_map'] content_errors =", "if self.converted_images_dir and check_content: if not self.converted_images_dir.exists(): content_errors = ['{}:", "are automatically got from command line arguments or config file", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "self.generate_meta() labels_to_id = meta['label_map'] content_errors = None if check_content: self.converted_images_dir", "np.zeros((28, 28)) column_template = '{}x{}' for x in range(28): for", "mnist images requires Pillow installation, please install it before usage\"", "this name will be used for converter class look up", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "= [] check_images = check_content and not self.convert_images meta =", "distributed on an \"AS IS\" BASIS, WITHOUT 
WARRANTIES OR CONDITIONS", "with dataset meta (e.g. label_map, color_encoding)', optional=True ) }) return", "automatically got from command line arguments or config file in", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "should be derived from BaseFormatConverter class. \"\"\" # register name", "ANY KIND, either express or implied. See the License for", "= dataset_meta.get('label_map') if 'labels' in dataset_meta: label_map = dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map']", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "numpy as np from ..config import PathField, BoolField from ..representation", "28)) column_template = '{}x{}' for x in range(28): for y", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "to json file with dataset meta (e.g. label_map, color_encoding)', optional=True", "i in range(10)}} dataset_meta = read_json(self.dataset_meta) label_map = dataset_meta.get('label_map') if", "range(28): pixel = int(features[column_template.format(x+1, y+1)]) image[x, y] = pixel return", "configuration_parameters = super().parameters() configuration_parameters.update({ 'annotation_file': PathField(description=\"Path to csv file which", "if 'labels' in dataset_meta: label_map = dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] = label_map", "level metadata. \"\"\" annotations = [] check_images = check_content and", "under the License is distributed on an \"AS IS\" BASIS,", "dictionary with additional dataset level metadata. 
\"\"\" annotations = []", "Image = None class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST CSV dataset converter.", "add error to errors list if file not found content_errors.append('{}:", "(c) 2019 Intel Corporation Licensed under the Apache License, Version", "is_directory=True, check_exists=False, description=\"Path to converted images location.\" ), 'dataset_meta_file': PathField(", "converter class look up __provider__ = 'mnist_csv' annotation_types = (ClassificationAnnotation,", "= ['{}: does not exist'.format(self.converted_images_dir)] check_images = False # read", "class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST CSV dataset converter. All annotation converters", "= super().parameters() configuration_parameters.update({ 'annotation_file': PathField(description=\"Path to csv file which contain", "this file except in compliance with the License. You may", "'converted_images_dir': PathField( optional=True, is_directory=True, check_exists=False, description=\"Path to converted images location.\"", "and not self.convert_images meta = self.generate_meta() labels_to_id = meta['label_map'] content_errors", "or config. \"\"\" self.test_csv_file = self.get_value_from_config('annotation_file') self.converted_images_dir = self.get_value_from_config('converted_images_dir') self.convert_images", "(ClassificationAnnotation, ) @classmethod def parameters(cls): configuration_parameters = super().parameters() configuration_parameters.update({ 'annotation_file':", "if check_images: if not check_file_existence(self.converted_images_dir / identifier): # add error", "\"\"\" MNIST CSV dataset converter. 
All annotation converters should be", "not self.converted_images_dir.exists(): content_errors = ['{}: does not exist'.format(self.converted_images_dir)] check_images =", "label_map = dataset_meta.get('label_map') if 'labels' in dataset_meta: label_map = dict(enumerate(dataset_meta['labels']))", "from .format_converter import BaseFormatConverter, ConverterReturn try: from PIL import Image", "@staticmethod def convert_image(features): image = np.zeros((28, 28)) column_template = '{}x{}'", "PathField( description='path to json file with dataset meta (e.g. label_map,", "for this converter # this name will be used for", "and limitations under the License. \"\"\" import numpy as np", "image = image.convert(\"L\") image.save(str(self.converted_images_dir / identifier)) annotations.append(ClassificationAnnotation(identifier, label)) if check_images:", "meta: dictionary with additional dataset level metadata. \"\"\" annotations =", "for x in range(28): for y in range(28): pixel =", "= int(features[column_template.format(x+1, y+1)]) image[x, y] = pixel return image def", "import Image except ImportError: Image = None class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\"", "method is responsible for obtaining the necessary parameters for converting", "file except in compliance with the License. You may obtain", "if progress_callback is not None and index % progress_interval ==", "PathField(description=\"Path to csv file which contain dataset.\"), 'convert_images': BoolField( optional=True,", "index, annotation in enumerate(annotation_table): identifier = '{}.png'.format(index) label = labels_to_id.get(annotation['label'],", "range(10)}} dataset_meta = read_json(self.dataset_meta) label_map = dataset_meta.get('label_map') if 'labels' in", "metadata. \"\"\" annotations = [] check_images = check_content and not", "line arguments or config file in method configure Returns: annotations:", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "check_images = check_content and not self.convert_images meta = self.generate_meta() labels_to_id", "under the Apache License, Version 2.0 (the \"License\"); you may", "directory.\" ), 'converted_images_dir': PathField( optional=True, is_directory=True, check_exists=False, description=\"Path to converted", "= None if check_content: self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent /", "responsible for obtaining the necessary parameters for converting from the", "PathField( optional=True, is_directory=True, check_exists=False, description=\"Path to converted images location.\" ),", "parameters for converting from the command line or config. \"\"\"", "enumerate(annotation_table): identifier = '{}.png'.format(index) label = labels_to_id.get(annotation['label'], int(annotation['label'])) if self.convert_images:", "Image is None: raise ValueError( \"conversion mnist images requires Pillow", "configuration_parameters.update({ 'annotation_file': PathField(description=\"Path to csv file which contain dataset.\"), 'convert_images':", "not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if self.convert_images and Image is None: raise", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "if check_content: self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent / 'converted_images' if", "See the License for the specific language governing permissions and", "governing permissions and limitations under the License. 
\"\"\" import numpy", "config file in method configure Returns: annotations: list of annotation", "Pillow installation, please install it before usage\" ) self.dataset_meta =", "import BaseFormatConverter, ConverterReturn try: from PIL import Image except ImportError:", "), 'dataset_meta_file': PathField( description='path to json file with dataset meta", "pixel return image def generate_meta(self): if not self.dataset_meta: return {'label_map':", "len(annotation_table) for index, annotation in enumerate(annotation_table): identifier = '{}.png'.format(index) label", "return image def generate_meta(self): if not self.dataset_meta: return {'label_map': {str(i):", "found content_errors.append('{}: does not exist'.format(self.converted_images_dir / identifier)) if progress_callback is", "check_content: self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent / 'converted_images' if self.converted_images_dir", "image.convert(\"L\") image.save(str(self.converted_images_dir / identifier)) annotations.append(ClassificationAnnotation(identifier, label)) if check_images: if not", "meta, content_errors) @staticmethod def convert_image(features): image = np.zeros((28, 28)) column_template", "in writing, software distributed under the License is distributed on", "required by applicable law or agreed to in writing, software", "does not exist'.format(self.converted_images_dir)] check_images = False # read original dataset", "class look up __provider__ = 'mnist_csv' annotation_types = (ClassificationAnnotation, )", "content_errors.append('{}: does not exist'.format(self.converted_images_dir / identifier)) if progress_callback is not", "optional=True, default=False, description=\"Allows to convert images from pickle file to", "identifier): # add error to errors list if file not", "or self.test_csv_file.parent / 'converted_images' if self.converted_images_dir and check_content: if not", "/ identifier)) if progress_callback is not None and index %", "be 
derived from BaseFormatConverter class. \"\"\" # register name for", "permissions and limitations under the License. \"\"\" import numpy as", "ConverterReturn try: from PIL import Image except ImportError: Image =", "try: from PIL import Image except ImportError: Image = None", "check_file_existence(self.converted_images_dir / identifier): # add error to errors list if", "optional=True ) }) return configuration_parameters def configure(self): \"\"\" This method", "), 'converted_images_dir': PathField( optional=True, is_directory=True, check_exists=False, description=\"Path to converted images", "self.get_value_from_config('dataset_meta_file') def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs): \"\"\" This method", "exist'.format(self.converted_images_dir / identifier)) if progress_callback is not None and index", "'mnist_csv' annotation_types = (ClassificationAnnotation, ) @classmethod def parameters(cls): configuration_parameters =", "y+1)]) image[x, y] = pixel return image def generate_meta(self): if", "image.save(str(self.converted_images_dir / identifier)) annotations.append(ClassificationAnnotation(identifier, label)) if check_images: if not check_file_existence(self.converted_images_dir", "software distributed under the License is distributed on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "for converter class look up __provider__ = 'mnist_csv' annotation_types =", "dataset annotation annotation_table = read_csv(self.test_csv_file) num_iterations = len(annotation_table) for index,", "is started. All arguments are automatically got from command line", "self.convert_images = self.get_value_from_config('convert_images') if self.convert_images and not self.converted_images_dir: self.converted_images_dir =", "arguments or config file in method configure Returns: annotations: list", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "Version 2.0 (the \"License\"); you may not use this file", "% progress_interval == 0: progress_callback(index / num_iterations * 100) return", "pixel = int(features[column_template.format(x+1, y+1)]) image[x, y] = pixel return image", "location.\" ), 'dataset_meta_file': PathField( description='path to json file with dataset", "identifier)) annotations.append(ClassificationAnnotation(identifier, label)) if check_images: if not check_file_existence(self.converted_images_dir / identifier):", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "label_map, color_encoding)', optional=True ) }) return configuration_parameters def configure(self): \"\"\"", "if not self.dataset_meta: return {'label_map': {str(i): i for i in", "= check_content and not self.convert_images meta = self.generate_meta() labels_to_id =", "check_file_existence, read_json from .format_converter import BaseFormatConverter, ConverterReturn try: from PIL", "convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs): \"\"\" This method is executed", "= self.generate_meta() labels_to_id = meta['label_map'] content_errors = None if check_content:", "file which contain dataset.\"), 'convert_images': BoolField( optional=True, default=False, description=\"Allows to", "you may not use this file except in compliance with", "@classmethod def parameters(cls): configuration_parameters = super().parameters() configuration_parameters.update({ 'annotation_file': PathField(description=\"Path to", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "the License. You may obtain a copy of the License", "config. 
\"\"\" self.test_csv_file = self.get_value_from_config('annotation_file') self.converted_images_dir = self.get_value_from_config('converted_images_dir') self.convert_images =", "self.converted_images_dir and check_content: if not self.converted_images_dir.exists(): content_errors = ['{}: does", "# read original dataset annotation annotation_table = read_csv(self.test_csv_file) num_iterations =", "self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if self.convert_images and Image is None: raise ValueError(", "use this file except in compliance with the License. You", "from pickle file to user specified directory.\" ), 'converted_images_dir': PathField(", "\"\"\" # register name for this converter # this name", "= self.converted_images_dir or self.test_csv_file.parent / 'converted_images' if self.converted_images_dir and check_content:", "= label_map or {str(i): i for i in range(10)} return", "description=\"Path to converted images location.\" ), 'dataset_meta_file': PathField( description='path to", "ValueError( \"conversion mnist images requires Pillow installation, please install it", "original dataset annotation annotation_table = read_csv(self.test_csv_file) num_iterations = len(annotation_table) for", "return {'label_map': {str(i): i for i in range(10)}} dataset_meta =", ") self.dataset_meta = self.get_value_from_config('dataset_meta_file') def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):", "[] check_images = check_content and not self.convert_images meta = self.generate_meta()", "'{}.png'.format(index) label = labels_to_id.get(annotation['label'], int(annotation['label'])) if self.convert_images: image = Image.fromarray(self.convert_image(annotation))", "up __provider__ = 'mnist_csv' annotation_types = (ClassificationAnnotation, ) @classmethod def", "command line arguments or config file in method configure Returns:", "self.get_value_from_config('annotation_file') 
self.converted_images_dir = self.get_value_from_config('converted_images_dir') self.convert_images = self.get_value_from_config('convert_images') if self.convert_images and", "converted images location.\" ), 'dataset_meta_file': PathField( description='path to json file", "read_json from .format_converter import BaseFormatConverter, ConverterReturn try: from PIL import", "except ImportError: Image = None class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST CSV", "\"\"\" annotations = [] check_images = check_content and not self.convert_images", "annotation annotation_table = read_csv(self.test_csv_file) num_iterations = len(annotation_table) for index, annotation", "parameters(cls): configuration_parameters = super().parameters() configuration_parameters.update({ 'annotation_file': PathField(description=\"Path to csv file", "label_map = dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] = label_map or {str(i): i for", "labels_to_id = meta['label_map'] content_errors = None if check_content: self.converted_images_dir =", "not exist'.format(self.converted_images_dir / identifier)) if progress_callback is not None and", "int(annotation['label'])) if self.convert_images: image = Image.fromarray(self.convert_image(annotation)) image = image.convert(\"L\") image.save(str(self.converted_images_dir", "0: progress_callback(index / num_iterations * 100) return ConverterReturn(annotations, meta, content_errors)", "configuration_parameters def configure(self): \"\"\" This method is responsible for obtaining", "= self.get_value_from_config('converted_images_dir') self.convert_images = self.get_value_from_config('convert_images') if self.convert_images and not self.converted_images_dir:", "image def generate_meta(self): if not self.dataset_meta: return {'label_map': {str(i): i", "specified directory.\" ), 'converted_images_dir': PathField( optional=True, is_directory=True, check_exists=False, description=\"Path to", "super().parameters() 
configuration_parameters.update({ 'annotation_file': PathField(description=\"Path to csv file which contain dataset.\"),", "list if file not found content_errors.append('{}: does not exist'.format(self.converted_images_dir /", "/ num_iterations * 100) return ConverterReturn(annotations, meta, content_errors) @staticmethod def", "self.converted_images_dir = self.get_value_from_config('converted_images_dir') self.convert_images = self.get_value_from_config('convert_images') if self.convert_images and not", "(the \"License\"); you may not use this file except in", "dataset_meta.get('label_map') if 'labels' in dataset_meta: label_map = dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] =", "column_template = '{}x{}' for x in range(28): for y in", "started. All arguments are automatically got from command line arguments", "Image.fromarray(self.convert_image(annotation)) image = image.convert(\"L\") image.save(str(self.converted_images_dir / identifier)) annotations.append(ClassificationAnnotation(identifier, label)) if", "class. \"\"\" # register name for this converter # this", "'dataset_meta_file': PathField( description='path to json file with dataset meta (e.g.", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "converters should be derived from BaseFormatConverter class. \"\"\" # register", "i for i in range(10)}} dataset_meta = read_json(self.dataset_meta) label_map =", "or config file in method configure Returns: annotations: list of", "and check_content: if not self.converted_images_dir.exists(): content_errors = ['{}: does not", "identifier)) if progress_callback is not None and index % progress_interval", "the Apache License, Version 2.0 (the \"License\"); you may not", "or implied. 
See the License for the specific language governing", "got from command line arguments or config file in method", "['{}: does not exist'.format(self.converted_images_dir)] check_images = False # read original", "KIND, either express or implied. See the License for the", "BaseFormatConverter, ConverterReturn try: from PIL import Image except ImportError: Image", "return ConverterReturn(annotations, meta, content_errors) @staticmethod def convert_image(features): image = np.zeros((28,", "to in writing, software distributed under the License is distributed", "= dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] = label_map or {str(i): i for i", "converter # this name will be used for converter class", "law or agreed to in writing, software distributed under the", "\"\"\" Copyright (c) 2019 Intel Corporation Licensed under the Apache", "\"\"\" This method is executed automatically when convert.py is started.", "file with dataset meta (e.g. label_map, color_encoding)', optional=True ) })", "= False # read original dataset annotation annotation_table = read_csv(self.test_csv_file)", "this converter # this name will be used for converter", "= np.zeros((28, 28)) column_template = '{}x{}' for x in range(28):", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "derived from BaseFormatConverter class. 
\"\"\" # register name for this", "arguments are automatically got from command line arguments or config", "not found content_errors.append('{}: does not exist'.format(self.converted_images_dir / identifier)) if progress_callback", "read_csv(self.test_csv_file) num_iterations = len(annotation_table) for index, annotation in enumerate(annotation_table): identifier", "= pixel return image def generate_meta(self): if not self.dataset_meta: return", "= Image.fromarray(self.convert_image(annotation)) image = image.convert(\"L\") image.save(str(self.converted_images_dir / identifier)) annotations.append(ClassificationAnnotation(identifier, label))", "self.test_csv_file.parent / 'converted_images' if self.converted_images_dir and check_content: if not self.converted_images_dir.exists():", "not exist'.format(self.converted_images_dir)] check_images = False # read original dataset annotation", "annotation_table = read_csv(self.test_csv_file) num_iterations = len(annotation_table) for index, annotation in", "usage\" ) self.dataset_meta = self.get_value_from_config('dataset_meta_file') def convert(self, check_content=False, progress_callback=None, progress_interval=100,", "dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] = label_map or {str(i): i for i in", "is executed automatically when convert.py is started. 
All arguments are", "..utils import read_csv, check_file_existence, read_json from .format_converter import BaseFormatConverter, ConverterReturn", "error to errors list if file not found content_errors.append('{}: does", "for the specific language governing permissions and limitations under the", "= read_json(self.dataset_meta) label_map = dataset_meta.get('label_map') if 'labels' in dataset_meta: label_map", "image[x, y] = pixel return image def generate_meta(self): if not", "int(features[column_template.format(x+1, y+1)]) image[x, y] = pixel return image def generate_meta(self):", "{str(i): i for i in range(10)}} dataset_meta = read_json(self.dataset_meta) label_map", "\"\"\" This method is responsible for obtaining the necessary parameters", "ImportError: Image = None class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST CSV dataset", "/ 'converted_images' if self.converted_images_dir and check_content: if not self.converted_images_dir.exists(): content_errors", "# register name for this converter # this name will", "All arguments are automatically got from command line arguments or", "}) return configuration_parameters def configure(self): \"\"\" This method is responsible", "the License for the specific language governing permissions and limitations", "may not use this file except in compliance with the", "None class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST CSV dataset converter. All annotation", "convert images from pickle file to user specified directory.\" ),", "for index, annotation in enumerate(annotation_table): identifier = '{}.png'.format(index) label =", "in method configure Returns: annotations: list of annotation representation objects.", "implied. 
See the License for the specific language governing permissions", "def convert_image(features): image = np.zeros((28, 28)) column_template = '{}x{}' for", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "generate_meta(self): if not self.dataset_meta: return {'label_map': {str(i): i for i", "num_iterations = len(annotation_table) for index, annotation in enumerate(annotation_table): identifier =", "= meta['label_map'] content_errors = None if check_content: self.converted_images_dir = self.converted_images_dir", "y] = pixel return image def generate_meta(self): if not self.dataset_meta:", "License. \"\"\" import numpy as np from ..config import PathField,", "self.converted_images_dir = self.test_csv_file.parent / 'converted_images' if not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if", "configure(self): \"\"\" This method is responsible for obtaining the necessary", "= self.test_csv_file.parent / 'converted_images' if not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if self.convert_images", "self.test_csv_file = self.get_value_from_config('annotation_file') self.converted_images_dir = self.get_value_from_config('converted_images_dir') self.convert_images = self.get_value_from_config('convert_images') if", "to user specified directory.\" ), 'converted_images_dir': PathField( optional=True, is_directory=True, check_exists=False,", "= '{}.png'.format(index) label = labels_to_id.get(annotation['label'], int(annotation['label'])) if self.convert_images: image =", "command line or config. \"\"\" self.test_csv_file = self.get_value_from_config('annotation_file') self.converted_images_dir =", "progress_callback is not None and index % progress_interval == 0:", "from ..utils import read_csv, check_file_existence, read_json from .format_converter import BaseFormatConverter,", "converting from the command line or config. 
\"\"\" self.test_csv_file =", "/ identifier): # add error to errors list if file", "in dataset_meta: label_map = dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] = label_map or {str(i):", "import read_csv, check_file_existence, read_json from .format_converter import BaseFormatConverter, ConverterReturn try:", "self.test_csv_file.parent / 'converted_images' if not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if self.convert_images and", "limitations under the License. \"\"\" import numpy as np from", "necessary parameters for converting from the command line or config.", "content_errors = ['{}: does not exist'.format(self.converted_images_dir)] check_images = False #", "executed automatically when convert.py is started. All arguments are automatically", "check_images: if not check_file_existence(self.converted_images_dir / identifier): # add error to", "csv file which contain dataset.\"), 'convert_images': BoolField( optional=True, default=False, description=\"Allows", "writing, software distributed under the License is distributed on an", "install it before usage\" ) self.dataset_meta = self.get_value_from_config('dataset_meta_file') def convert(self,", "the command line or config. \"\"\" self.test_csv_file = self.get_value_from_config('annotation_file') self.converted_images_dir", "json file with dataset meta (e.g. label_map, color_encoding)', optional=True )", "in range(28): for y in range(28): pixel = int(features[column_template.format(x+1, y+1)])", "label = labels_to_id.get(annotation['label'], int(annotation['label'])) if self.convert_images: image = Image.fromarray(self.convert_image(annotation)) image", "does not exist'.format(self.converted_images_dir / identifier)) if progress_callback is not None", "in compliance with the License. 
You may obtain a copy", "# this name will be used for converter class look", "Copyright (c) 2019 Intel Corporation Licensed under the Apache License,", "if not check_file_existence(self.converted_images_dir / identifier): # add error to errors", "return configuration_parameters def configure(self): \"\"\" This method is responsible for", "agreed to in writing, software distributed under the License is", "= None class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST CSV dataset converter. All", "import ClassificationAnnotation from ..utils import read_csv, check_file_existence, read_json from .format_converter", "optional=True, is_directory=True, check_exists=False, description=\"Path to converted images location.\" ), 'dataset_meta_file':", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "when convert.py is started. All arguments are automatically got from", "import numpy as np from ..config import PathField, BoolField from", "All annotation converters should be derived from BaseFormatConverter class. 
\"\"\"", "check_content and not self.convert_images meta = self.generate_meta() labels_to_id = meta['label_map']", "is not None and index % progress_interval == 0: progress_callback(index", "100) return ConverterReturn(annotations, meta, content_errors) @staticmethod def convert_image(features): image =", "description=\"Allows to convert images from pickle file to user specified", "/ 'converted_images' if not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if self.convert_images and Image", "is None: raise ValueError( \"conversion mnist images requires Pillow installation,", "..config import PathField, BoolField from ..representation import ClassificationAnnotation from ..utils", "ClassificationAnnotation from ..utils import read_csv, check_file_existence, read_json from .format_converter import", "convert_image(features): image = np.zeros((28, 28)) column_template = '{}x{}' for x", "either express or implied. See the License for the specific", "used for converter class look up __provider__ = 'mnist_csv' annotation_types", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "\"\"\" import numpy as np from ..config import PathField, BoolField", "not self.converted_images_dir: self.converted_images_dir = self.test_csv_file.parent / 'converted_images' if not self.converted_images_dir.exists():", "\"License\"); you may not use this file except in compliance", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "user specified directory.\" ), 'converted_images_dir': PathField( optional=True, is_directory=True, check_exists=False, description=\"Path", "CSV dataset converter. 
All annotation converters should be derived from", "= read_csv(self.test_csv_file) num_iterations = len(annotation_table) for index, annotation in enumerate(annotation_table):", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "License for the specific language governing permissions and limitations under", "label_map or {str(i): i for i in range(10)} return dataset_meta", "if self.convert_images and Image is None: raise ValueError( \"conversion mnist", "(e.g. label_map, color_encoding)', optional=True ) }) return configuration_parameters def configure(self):", "not check_file_existence(self.converted_images_dir / identifier): # add error to errors list", "= self.get_value_from_config('convert_images') if self.convert_images and not self.converted_images_dir: self.converted_images_dir = self.test_csv_file.parent", "PathField, BoolField from ..representation import ClassificationAnnotation from ..utils import read_csv,", "and not self.converted_images_dir: self.converted_images_dir = self.test_csv_file.parent / 'converted_images' if not", "for i in range(10)}} dataset_meta = read_json(self.dataset_meta) label_map = dataset_meta.get('label_map')", "label)) if check_images: if not check_file_existence(self.converted_images_dir / identifier): # add", "self.dataset_meta: return {'label_map': {str(i): i for i in range(10)}} dataset_meta", "..representation import ClassificationAnnotation from ..utils import read_csv, check_file_existence, read_json from", "self.converted_images_dir.mkdir(parents=True) if self.convert_images and Image is None: raise ValueError( \"conversion", "from the command line or config. 
\"\"\" self.test_csv_file = self.get_value_from_config('annotation_file')", "False # read original dataset annotation annotation_table = read_csv(self.test_csv_file) num_iterations", "read original dataset annotation annotation_table = read_csv(self.test_csv_file) num_iterations = len(annotation_table)", "'converted_images' if not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if self.convert_images and Image is", "and index % progress_interval == 0: progress_callback(index / num_iterations *", "def generate_meta(self): if not self.dataset_meta: return {'label_map': {str(i): i for", "method configure Returns: annotations: list of annotation representation objects. meta:", "* 100) return ConverterReturn(annotations, meta, content_errors) @staticmethod def convert_image(features): image", "self.convert_images and not self.converted_images_dir: self.converted_images_dir = self.test_csv_file.parent / 'converted_images' if", "self.converted_images_dir: self.converted_images_dir = self.test_csv_file.parent / 'converted_images' if not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True)", "installation, please install it before usage\" ) self.dataset_meta = self.get_value_from_config('dataset_meta_file')", "to csv file which contain dataset.\"), 'convert_images': BoolField( optional=True, default=False,", "with additional dataset level metadata. \"\"\" annotations = [] check_images", "not self.dataset_meta: return {'label_map': {str(i): i for i in range(10)}}", "def configure(self): \"\"\" This method is responsible for obtaining the", "for converting from the command line or config. \"\"\" self.test_csv_file", "MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST CSV dataset converter. All annotation converters should", "except in compliance with the License. 
You may obtain a", "= self.get_value_from_config('dataset_meta_file') def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs): \"\"\" This", "is responsible for obtaining the necessary parameters for converting from", "meta (e.g. label_map, color_encoding)', optional=True ) }) return configuration_parameters def", "in range(10)}} dataset_meta = read_json(self.dataset_meta) label_map = dataset_meta.get('label_map') if 'labels'", "# add error to errors list if file not found", "check_content=False, progress_callback=None, progress_interval=100, **kwargs): \"\"\" This method is executed automatically", "compliance with the License. You may obtain a copy of", "language governing permissions and limitations under the License. \"\"\" import", "from ..config import PathField, BoolField from ..representation import ClassificationAnnotation from", "self.convert_images meta = self.generate_meta() labels_to_id = meta['label_map'] content_errors = None", "ConverterReturn(annotations, meta, content_errors) @staticmethod def convert_image(features): image = np.zeros((28, 28))", "line or config. \"\"\" self.test_csv_file = self.get_value_from_config('annotation_file') self.converted_images_dir = self.get_value_from_config('converted_images_dir')", ") }) return configuration_parameters def configure(self): \"\"\" This method is", "PIL import Image except ImportError: Image = None class MNISTCSVFormatConverter(BaseFormatConverter):", "file not found content_errors.append('{}: does not exist'.format(self.converted_images_dir / identifier)) if", "if not self.converted_images_dir.exists(): self.converted_images_dir.mkdir(parents=True) if self.convert_images and Image is None:", "from command line arguments or config file in method configure", "Returns: annotations: list of annotation representation objects. 
meta: dictionary with", "\"\"\" self.test_csv_file = self.get_value_from_config('annotation_file') self.converted_images_dir = self.get_value_from_config('converted_images_dir') self.convert_images = self.get_value_from_config('convert_images')", "annotations.append(ClassificationAnnotation(identifier, label)) if check_images: if not check_file_existence(self.converted_images_dir / identifier): #", "annotation converters should be derived from BaseFormatConverter class. \"\"\" #", "2019 Intel Corporation Licensed under the Apache License, Version 2.0", "None: raise ValueError( \"conversion mnist images requires Pillow installation, please", "= image.convert(\"L\") image.save(str(self.converted_images_dir / identifier)) annotations.append(ClassificationAnnotation(identifier, label)) if check_images: if", "num_iterations * 100) return ConverterReturn(annotations, meta, content_errors) @staticmethod def convert_image(features):", "representation objects. meta: dictionary with additional dataset level metadata. \"\"\"", "color_encoding)', optional=True ) }) return configuration_parameters def configure(self): \"\"\" This", "content_errors) @staticmethod def convert_image(features): image = np.zeros((28, 28)) column_template =", "additional dataset level metadata. \"\"\" annotations = [] check_images =", "default=False, description=\"Allows to convert images from pickle file to user", "self.convert_images: image = Image.fromarray(self.convert_image(annotation)) image = image.convert(\"L\") image.save(str(self.converted_images_dir / identifier))", "name will be used for converter class look up __provider__", "dataset meta (e.g. 
label_map, color_encoding)', optional=True ) }) return configuration_parameters", "from ..representation import ClassificationAnnotation from ..utils import read_csv, check_file_existence, read_json", "= (ClassificationAnnotation, ) @classmethod def parameters(cls): configuration_parameters = super().parameters() configuration_parameters.update({", "dataset_meta: label_map = dict(enumerate(dataset_meta['labels'])) dataset_meta['label_map'] = label_map or {str(i): i", "list of annotation representation objects. meta: dictionary with additional dataset", "method is executed automatically when convert.py is started. All arguments", "meta['label_map'] content_errors = None if check_content: self.converted_images_dir = self.converted_images_dir or", "dataset.\"), 'convert_images': BoolField( optional=True, default=False, description=\"Allows to convert images from", "content_errors = None if check_content: self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent", "MNIST CSV dataset converter. All annotation converters should be derived", "will be used for converter class look up __provider__ =", "raise ValueError( \"conversion mnist images requires Pillow installation, please install", "be used for converter class look up __provider__ = 'mnist_csv'", "objects. meta: dictionary with additional dataset level metadata. \"\"\" annotations", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", "file in method configure Returns: annotations: list of annotation representation", "dataset converter. All annotation converters should be derived from BaseFormatConverter", "register name for this converter # this name will be", "the License. \"\"\" import numpy as np from ..config import", "pickle file to user specified directory.\" ), 'converted_images_dir': PathField( optional=True,", "express or implied. 
See the License for the specific language", "x in range(28): for y in range(28): pixel = int(features[column_template.format(x+1,", "errors list if file not found content_errors.append('{}: does not exist'.format(self.converted_images_dir", "file to user specified directory.\" ), 'converted_images_dir': PathField( optional=True, is_directory=True,", "index % progress_interval == 0: progress_callback(index / num_iterations * 100)", "y in range(28): pixel = int(features[column_template.format(x+1, y+1)]) image[x, y] =", "description='path to json file with dataset meta (e.g. label_map, color_encoding)',", "contain dataset.\"), 'convert_images': BoolField( optional=True, default=False, description=\"Allows to convert images", "configure Returns: annotations: list of annotation representation objects. meta: dictionary", "converter. All annotation converters should be derived from BaseFormatConverter class.", "convert.py is started. All arguments are automatically got from command", "self.dataset_meta = self.get_value_from_config('dataset_meta_file') def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs): \"\"\"", "before usage\" ) self.dataset_meta = self.get_value_from_config('dataset_meta_file') def convert(self, check_content=False, progress_callback=None,", "as np from ..config import PathField, BoolField from ..representation import", "This method is responsible for obtaining the necessary parameters for", "for obtaining the necessary parameters for converting from the command", "annotations = [] check_images = check_content and not self.convert_images meta", "check_exists=False, description=\"Path to converted images location.\" ), 'dataset_meta_file': PathField( description='path", "progress_interval=100, **kwargs): \"\"\" This method is executed automatically when convert.py", "dataset_meta = read_json(self.dataset_meta) label_map = dataset_meta.get('label_map') if 'labels' in dataset_meta:", "\"conversion mnist images 
requires Pillow installation, please install it before", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "for y in range(28): pixel = int(features[column_template.format(x+1, y+1)]) image[x, y]", "Image except ImportError: Image = None class MNISTCSVFormatConverter(BaseFormatConverter): \"\"\" MNIST", "dataset level metadata. \"\"\" annotations = [] check_images = check_content", "read_json(self.dataset_meta) label_map = dataset_meta.get('label_map') if 'labels' in dataset_meta: label_map =", "BoolField( optional=True, default=False, description=\"Allows to convert images from pickle file", "to converted images location.\" ), 'dataset_meta_file': PathField( description='path to json", "range(28): for y in range(28): pixel = int(features[column_template.format(x+1, y+1)]) image[x,", "BoolField from ..representation import ClassificationAnnotation from ..utils import read_csv, check_file_existence,", "with the License. You may obtain a copy of the", "if not self.converted_images_dir.exists(): content_errors = ['{}: does not exist'.format(self.converted_images_dir)] check_images", "= labels_to_id.get(annotation['label'], int(annotation['label'])) if self.convert_images: image = Image.fromarray(self.convert_image(annotation)) image =", "in range(28): pixel = int(features[column_template.format(x+1, y+1)]) image[x, y] = pixel", "{'label_map': {str(i): i for i in range(10)}} dataset_meta = read_json(self.dataset_meta)", "if self.convert_images: image = Image.fromarray(self.convert_image(annotation)) image = image.convert(\"L\") image.save(str(self.converted_images_dir /", "**kwargs): \"\"\" This method is executed automatically when convert.py is", "progress_callback(index / num_iterations * 100) return ConverterReturn(annotations, meta, content_errors) @staticmethod", "self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent / 'converted_images' if self.converted_images_dir and", "images from pickle 
file to user specified directory.\" ), 'converted_images_dir':", "__provider__ = 'mnist_csv' annotation_types = (ClassificationAnnotation, ) @classmethod def parameters(cls):", "specific language governing permissions and limitations under the License. \"\"\"", "self.convert_images and Image is None: raise ValueError( \"conversion mnist images", "image = np.zeros((28, 28)) column_template = '{}x{}' for x in", "image = Image.fromarray(self.convert_image(annotation)) image = image.convert(\"L\") image.save(str(self.converted_images_dir / identifier)) annotations.append(ClassificationAnnotation(identifier,", "of annotation representation objects. meta: dictionary with additional dataset level", "np from ..config import PathField, BoolField from ..representation import ClassificationAnnotation", "self.get_value_from_config('converted_images_dir') self.convert_images = self.get_value_from_config('convert_images') if self.convert_images and not self.converted_images_dir: self.converted_images_dir", ".format_converter import BaseFormatConverter, ConverterReturn try: from PIL import Image except", "if self.convert_images and not self.converted_images_dir: self.converted_images_dir = self.test_csv_file.parent / 'converted_images'", "applicable law or agreed to in writing, software distributed under", "identifier = '{}.png'.format(index) label = labels_to_id.get(annotation['label'], int(annotation['label'])) if self.convert_images: image", "'convert_images': BoolField( optional=True, default=False, description=\"Allows to convert images from pickle", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "'{}x{}' for x in range(28): for y in range(28): pixel", "meta = self.generate_meta() labels_to_id = meta['label_map'] content_errors = None if", "the specific language governing permissions and limitations under the License.", "def parameters(cls): configuration_parameters = super().parameters() configuration_parameters.update({ 'annotation_file': 
PathField(description=\"Path to csv", "progress_callback=None, progress_interval=100, **kwargs): \"\"\" This method is executed automatically when", "self.get_value_from_config('convert_images') if self.convert_images and not self.converted_images_dir: self.converted_images_dir = self.test_csv_file.parent /", "please install it before usage\" ) self.dataset_meta = self.get_value_from_config('dataset_meta_file') def", "or agreed to in writing, software distributed under the License", "'annotation_file': PathField(description=\"Path to csv file which contain dataset.\"), 'convert_images': BoolField(", "annotations: list of annotation representation objects. meta: dictionary with additional", "self.converted_images_dir.exists(): content_errors = ['{}: does not exist'.format(self.converted_images_dir)] check_images = False", "from BaseFormatConverter class. \"\"\" # register name for this converter", "'converted_images' if self.converted_images_dir and check_content: if not self.converted_images_dir.exists(): content_errors =", "OF ANY KIND, either express or implied. See the License", "exist'.format(self.converted_images_dir)] check_images = False # read original dataset annotation annotation_table", "check_images = False # read original dataset annotation annotation_table =", "and Image is None: raise ValueError( \"conversion mnist images requires", "License, Version 2.0 (the \"License\"); you may not use this", "the necessary parameters for converting from the command line or", "Intel Corporation Licensed under the Apache License, Version 2.0 (the", "automatically when convert.py is started. 
All arguments are automatically got", "def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs): \"\"\" This method is", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "progress_interval == 0: progress_callback(index / num_iterations * 100) return ConverterReturn(annotations,", ") @classmethod def parameters(cls): configuration_parameters = super().parameters() configuration_parameters.update({ 'annotation_file': PathField(description=\"Path", "BaseFormatConverter class. \"\"\" # register name for this converter #", "This method is executed automatically when convert.py is started. All", "License. You may obtain a copy of the License at", "annotation_types = (ClassificationAnnotation, ) @classmethod def parameters(cls): configuration_parameters = super().parameters()", "== 0: progress_callback(index / num_iterations * 100) return ConverterReturn(annotations, meta,", "to convert images from pickle file to user specified directory.\"", "= len(annotation_table) for index, annotation in enumerate(annotation_table): identifier = '{}.png'.format(index)", "labels_to_id.get(annotation['label'], int(annotation['label'])) if self.convert_images: image = Image.fromarray(self.convert_image(annotation)) image = image.convert(\"L\")", "self.converted_images_dir or self.test_csv_file.parent / 'converted_images' if self.converted_images_dir and check_content: if", "read_csv, check_file_existence, read_json from .format_converter import BaseFormatConverter, ConverterReturn try: from", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "which contain dataset.\"), 'convert_images': BoolField( optional=True, default=False, description=\"Allows to convert", "images location.\" ), 'dataset_meta_file': PathField( description='path to json file with", "annotation representation objects. 
meta: dictionary with additional dataset level metadata.", "None and index % progress_interval == 0: progress_callback(index / num_iterations" ]
[ "the action that has raised this error exc: Caught exception", "import config import log import logging import sys import wx", "\"\"\"Wrapper for creating a L{BugReport} dialog and show the given", "return exc_type = exc.__class__.__name__ exc_msg = str(exc) header = self.st_header.GetLabel()", "exc_type = exc.__class__.__name__ exc_msg = str(exc) header = self.st_header.GetLabel() %", "dialog content to the clipboard\" text = self.tc_details.GetValue() if not", "details['error'] = str(inst.args) text = _(\"\"\"%(msg)s Error type: %(type)s Error", "= log.getBufferAsString() if not exc_msg: exc_msg = _('No summary available')", "exc_tb)) self._fill_dialog(msg, exc_type, _('An internal error occurred')) def _fill_dialog(self, exc_msg,", "value exc_tb: Exception traceback msg: Short description of the exception", "exc_value: Exception value exc_tb: Exception traceback msg: Short description of", "a L{BugReport} dialog and show the given exception details. exc_type:", "# Flag to prevent dialog popups during test runs. def", "and show the details of the given exception instance. msg:", "str(inst.args) text = _(\"\"\"%(msg)s Error type: %(type)s Error code: %(error)s", "getattr(sys, '_called_from_test', False): return wx.ID_OK super(BugReport, self).ShowModal(**kwargs) def Show(msg, exc):", "\"\"\"\\ Dialog to show details of internal errors. 
@copyright: 2014-2016", "BugReport() dialog.SetContentEI(exc_type, exc_value, exc_tb, msg) dialog.ShowModal() dialog.Destroy() def ShowEnvironmentError(msg, inst):", "self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto = self.tc_howto_report.GetValue() howto = howto %", "internal error occurred')): \"\"\"Format given exception and add details to", "details of internal errors\" _disabled = False # Flag to", "don't use exception() because it overwrites exc_info with 1 logging.error(msg,", "BugReport(bugdialog_ui.UIBugDialog): \"Dialog to show details of internal errors\" _disabled =", "exc_value, exc_tb)) self._fill_dialog(msg, exc_type, _('An internal error occurred')) def _fill_dialog(self,", "content to the clipboard\" text = self.tc_details.GetValue() if not text:", "dialog content. msg: Short description of the action that has", "dialog and show the details of the given exception instance.", "details = {'msg':msg, 'type':inst.__class__.__name__} if inst.filename: details['filename'] = _('Filename: %s')", "information and show it as dialog content. msg: Short description", "LICENSE.txt) - THIS PROGRAM COMES WITH NO WARRANTY \"\"\" import", "given exception details. exc_type: Exception type exc_value: Exception value exc_tb:", "caught exception\"\"\" details = {'msg':msg, 'type':inst.__class__.__name__} if inst.filename: details['filename'] =", "exception see ShowEI(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContent(msg, exc) dialog.ShowModal()", "exc) dialog.ShowModal() dialog.Destroy() def ShowEI(exc_type, exc_value, exc_tb, msg=None): \"\"\"Wrapper for", "config import log import logging import sys import wx class", "\"\"\"Format given exception and add details to dialog. 
exc_type: Exception", "inst.strerror is not None: details['error'] = '%s - %s' %", "% (inst.errno, inst.strerror) else: details['error'] = str(inst.args) text = _(\"\"\"%(msg)s", "self.tc_details.GetValue() if not text: return data = wx.TextDataObject(text) if wx.TheClipboard.Open():", "def SetContent(self, msg, exc): \"\"\"Prepare given exception information and show", "exc_type, header): \"\"\"Fill the bug dialog exc_msg: Short exception summary", "\"Dialog to show details of internal errors\" _disabled = False", "PROGRAM COMES WITH NO WARRANTY \"\"\" import bugdialog_ui import config", "creating a L{BugReport} dialog and show the details of the", "header): \"\"\"Fill the bug dialog exc_msg: Short exception summary exc_type:", "type as string header: Initial message see: L{SetContent(), SetContentEI()\"\"\" details", "exc_msg: exc_msg = _('No summary available') summary = self.st_summary.GetLabel() %", "self._fill_dialog(msg, exc_type, _('An internal error occurred')) def _fill_dialog(self, exc_msg, exc_type,", "header) def SetContentEI(self, exc_type, exc_value, exc_tb, msg=_('An internal error occurred')):", "inst: The caught exception\"\"\" details = {'msg':msg, 'type':inst.__class__.__name__} if inst.filename:", "user-friendly msg: Error message inst: The caught exception\"\"\" details =", "\"\"\"Wrapper for creating a L{BugReport} dialog and show the details", "errors\" _disabled = False # Flag to prevent dialog popups", "runs. def __init__(self): self._disabled = getattr(sys, '_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self, None,", "exc_type, exc_value, exc_tb, msg=_('An internal error occurred')): \"\"\"Format given exception", "self.tc_howto_report.GetValue() howto = howto % {'log_file': config.log_file} self.tc_howto_report.SetValue(howto) def OnCopy(self,", "return wx.ID_OK super(BugReport, self).ShowModal(**kwargs) def Show(msg, exc): \"\"\"Wrapper for creating", "details of internal errors. 
@copyright: 2014-2016 <NAME> @copyright: 2017 <NAME>", "__init__(self): self._disabled = getattr(sys, '_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self, None, -1, \"\")", "False) bugdialog_ui.UIBugDialog.__init__(self, None, -1, \"\") def SetContent(self, msg, exc): \"\"\"Prepare", "raised this error exc: Caught exception see ShowEI(), BugReport.SetContent()\"\"\" dialog", "THIS PROGRAM COMES WITH NO WARRANTY \"\"\" import bugdialog_ui import", "COMES WITH NO WARRANTY \"\"\" import bugdialog_ui import config import", "= str(exc) header = self.st_header.GetLabel() % {'action': msg} log.exception_orig(header) self._fill_dialog(exc_msg,", "def OnCopy(self, event): \"Copy the dialog content to the clipboard\"", "class BugReport(bugdialog_ui.UIBugDialog): \"Dialog to show details of internal errors\" _disabled", "{'msg':msg, 'type':inst.__class__.__name__} if inst.filename: details['filename'] = _('Filename: %s') % inst.filename", "L{SetContent(), SetContentEI()\"\"\" details = log.getBufferAsString() if not exc_msg: exc_msg =", "dialog and show the given exception details. exc_type: Exception type", "'exc_type':exc_type, 'exc_msg':exc_msg } self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto = self.tc_howto_report.GetValue() howto", "has raised this error exc: Caught exception (Exception instance) see:", "= exc.__class__.__name__ exc_msg = str(exc) header = self.st_header.GetLabel() % {'action':", "the given exception instance. msg: Short description of the action", "action that has raised this error exc: Caught exception (Exception", "the details of the given exception instance. 
msg: Short description", "= '%s - %s' % (inst.errno, inst.strerror) else: details['error'] =", "if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox(\"Unable to open the clipboard\",", "is not None and inst.strerror is not None: details['error'] =", "\"Error\") def ShowModal(self, **kwargs): if getattr(sys, '_called_from_test', False): return wx.ID_OK", "self._fill_dialog(exc_msg, exc_type, header) def SetContentEI(self, exc_type, exc_value, exc_tb, msg=_('An internal", "during test runs. def __init__(self): self._disabled = getattr(sys, '_called_from_test', False)", "howto = howto % {'log_file': config.log_file} self.tc_howto_report.SetValue(howto) def OnCopy(self, event):", "details. exc_type: Exception type exc_value: Exception value exc_tb: Exception traceback", "summary available') summary = self.st_summary.GetLabel() % { 'exc_type':exc_type, 'exc_msg':exc_msg }", "because it overwrites exc_info with 1 logging.error(msg, exc_info=(exc_type, exc_value, exc_tb))", "if inst.errno is not None and inst.strerror is not None:", "self.st_header.GetLabel() % {'action': msg} log.exception_orig(header) self._fill_dialog(exc_msg, exc_type, header) def SetContentEI(self,", "= howto % {'log_file': config.log_file} self.tc_howto_report.SetValue(howto) def OnCopy(self, event): \"Copy", "WITH NO WARRANTY \"\"\" import bugdialog_ui import config import log", "{'log_file': config.log_file} self.tc_howto_report.SetValue(howto) def OnCopy(self, event): \"Copy the dialog content", "for creating a L{BugReport} dialog and show the details of", "exc): \"\"\"Wrapper for creating a L{BugReport} dialog and show the", "Exception traceback msg: Short description of the exception see: SetContent()\"\"\"", "= BugReport() dialog.SetContent(msg, exc) dialog.ShowModal() dialog.Destroy() def ShowEI(exc_type, exc_value, exc_tb,", "open the clipboard\", \"Error\") def ShowModal(self, **kwargs): if getattr(sys, '_called_from_test',", "L{BugReport} 
dialog and show the given exception details. exc_type: Exception", "exc_type, _('An internal error occurred')) def _fill_dialog(self, exc_msg, exc_type, header):", "type exc_value: Exception value exc_tb: Exception traceback msg: Short description", "text = self.tc_details.GetValue() if not text: return data = wx.TextDataObject(text)", "and user-friendly msg: Error message inst: The caught exception\"\"\" details", "Short exception summary exc_type: Exception type as string header: Initial", "creating a L{BugReport} dialog and show the given exception details.", "dialog exc_msg: Short exception summary exc_type: Exception type as string", "wx.ID_OK super(BugReport, self).ShowModal(**kwargs) def Show(msg, exc): \"\"\"Wrapper for creating a", "see: SetContent()\"\"\" if self._disabled: return # don't use exception() because", "error occurred')): \"\"\"Format given exception and add details to dialog.", "exception see: L{Show(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContentEI(exc_type, exc_value, exc_tb,", "NO WARRANTY \"\"\" import bugdialog_ui import config import log import", "The caught exception\"\"\" details = {'msg':msg, 'type':inst.__class__.__name__} if inst.filename: details['filename']", "Short description of the exception see: SetContent()\"\"\" if self._disabled: return", "exc_tb: Exception traceback msg: Short description of the exception see:", "config.log_file} self.tc_howto_report.SetValue(howto) def OnCopy(self, event): \"Copy the dialog content to", "% {'action': msg} log.exception_orig(header) self._fill_dialog(exc_msg, exc_type, header) def SetContentEI(self, exc_type,", "inst.errno is not None and inst.strerror is not None: details['error']", "self.st_summary.GetLabel() % { 'exc_type':exc_type, 'exc_msg':exc_msg } self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto", "exc_type: Exception type as string header: Initial message see: L{SetContent(),", "return # don't use 
exception() because it overwrites exc_info with", "to dialog. exc_type: Exception type exc_value: Exception value exc_tb: Exception", "'type':inst.__class__.__name__} if inst.filename: details['filename'] = _('Filename: %s') % inst.filename if", "exception details. exc_type: Exception type exc_value: Exception value exc_tb: Exception", "log.getBufferAsString() if not exc_msg: exc_msg = _('No summary available') summary", "2017 <NAME> @license: MIT (see LICENSE.txt) - THIS PROGRAM COMES", "= self.tc_howto_report.GetValue() howto = howto % {'log_file': config.log_file} self.tc_howto_report.SetValue(howto) def", "Dialog to show details of internal errors. @copyright: 2014-2016 <NAME>", "of the exception see: SetContent()\"\"\" if self._disabled: return # don't", "exc_msg, exc_type, header): \"\"\"Fill the bug dialog exc_msg: Short exception", "\"\"\"Prepare given exception information and show it as dialog content.", "wx.TheClipboard.Close() else: wx.MessageBox(\"Unable to open the clipboard\", \"Error\") def ShowModal(self,", "Exception value exc_tb: Exception traceback msg: Short description of the", "@copyright: 2017 <NAME> @license: MIT (see LICENSE.txt) - THIS PROGRAM", "this error exc: Caught exception see ShowEI(), BugReport.SetContent()\"\"\" dialog =", "description of the action that has raised this error exc:", "exception see: SetContent()\"\"\" if self._disabled: return # don't use exception()", "exc_info with 1 logging.error(msg, exc_info=(exc_type, exc_value, exc_tb)) self._fill_dialog(msg, exc_type, _('An", "instance. 
msg: Short description of the action that has raised", "the exception see: SetContent()\"\"\" if self._disabled: return # don't use", "exceptions detailed and user-friendly msg: Error message inst: The caught", "exc_msg = str(exc) header = self.st_header.GetLabel() % {'action': msg} log.exception_orig(header)", "clipboard\", \"Error\") def ShowModal(self, **kwargs): if getattr(sys, '_called_from_test', False): return", "dialog popups during test runs. def __init__(self): self._disabled = getattr(sys,", "internal errors. @copyright: 2014-2016 <NAME> @copyright: 2017 <NAME> @license: MIT", "dialog = BugReport() dialog.SetContentEI(exc_type, exc_value, exc_tb, msg) dialog.ShowModal() dialog.Destroy() def", "a L{BugReport} dialog and show the details of the given", "L{BugReport} dialog and show the details of the given exception", "= self.st_header.GetLabel() % {'action': msg} log.exception_orig(header) self._fill_dialog(exc_msg, exc_type, header) def", "# don't use exception() because it overwrites exc_info with 1", "Short description of the exception see: L{Show(), BugReport.SetContent()\"\"\" dialog =", "\"Copy the dialog content to the clipboard\" text = self.tc_details.GetValue()", "code: %(error)s %(filename)s\"\"\") % details wx.MessageBox(text, _('Error'), wx.OK | wx.CENTRE", "None and inst.strerror is not None: details['error'] = '%s -", "<NAME> @copyright: 2017 <NAME> @license: MIT (see LICENSE.txt) - THIS", "dialog.SetContentEI(exc_type, exc_value, exc_tb, msg) dialog.ShowModal() dialog.Destroy() def ShowEnvironmentError(msg, inst): \"\"\"Show", "wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox(\"Unable to open the clipboard\", \"Error\") def", "msg) dialog.ShowModal() dialog.Destroy() def ShowEnvironmentError(msg, inst): \"\"\"Show EnvironmentError exceptions detailed", "of internal errors\" _disabled = False # Flag to prevent", "dialog.ShowModal() dialog.Destroy() def ShowEI(exc_type, exc_value, exc_tb, msg=None): \"\"\"Wrapper for 
creating", "error exc: Caught exception (Exception instance) see: SetContentEI()\"\"\" if self._disabled:", "description of the exception see: SetContent()\"\"\" if self._disabled: return #", "ShowModal(self, **kwargs): if getattr(sys, '_called_from_test', False): return wx.ID_OK super(BugReport, self).ShowModal(**kwargs)", "the bug dialog exc_msg: Short exception summary exc_type: Exception type", "\"\") def SetContent(self, msg, exc): \"\"\"Prepare given exception information and", "action that has raised this error exc: Caught exception see", "traceback msg: Short description of the exception see: SetContent()\"\"\" if", "{ 'exc_type':exc_type, 'exc_msg':exc_msg } self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto = self.tc_howto_report.GetValue()", "return data = wx.TextDataObject(text) if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox(\"Unable", "%(error)s %(filename)s\"\"\") % details wx.MessageBox(text, _('Error'), wx.OK | wx.CENTRE |", "show details of internal errors. @copyright: 2014-2016 <NAME> @copyright: 2017", "1 logging.error(msg, exc_info=(exc_type, exc_value, exc_tb)) self._fill_dialog(msg, exc_type, _('An internal error", "SetContentEI()\"\"\" details = log.getBufferAsString() if not exc_msg: exc_msg = _('No", "that has raised this error exc: Caught exception (Exception instance)", "error exc: Caught exception see ShowEI(), BugReport.SetContent()\"\"\" dialog = BugReport()", "if not text: return data = wx.TextDataObject(text) if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data)", "add details to dialog. 
exc_type: Exception type exc_value: Exception value", "string header: Initial message see: L{SetContent(), SetContentEI()\"\"\" details = log.getBufferAsString()", "= self.tc_details.GetValue() if not text: return data = wx.TextDataObject(text) if", "= wx.TextDataObject(text) if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox(\"Unable to open", "show details of internal errors\" _disabled = False # Flag", "data = wx.TextDataObject(text) if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox(\"Unable to", "wx class BugReport(bugdialog_ui.UIBugDialog): \"Dialog to show details of internal errors\"", "OnCopy(self, event): \"Copy the dialog content to the clipboard\" text", "msg} log.exception_orig(header) self._fill_dialog(exc_msg, exc_type, header) def SetContentEI(self, exc_type, exc_value, exc_tb,", "- THIS PROGRAM COMES WITH NO WARRANTY \"\"\" import bugdialog_ui", "exc_type: Exception type exc_value: Exception value exc_tb: Exception traceback msg:", "logging import sys import wx class BugReport(bugdialog_ui.UIBugDialog): \"Dialog to show", "with 1 logging.error(msg, exc_info=(exc_type, exc_value, exc_tb)) self._fill_dialog(msg, exc_type, _('An internal", "and show the given exception details. exc_type: Exception type exc_value:", "%s' % (inst.errno, inst.strerror) else: details['error'] = str(inst.args) text =", "clipboard\" text = self.tc_details.GetValue() if not text: return data =", "wx.TextDataObject(text) if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox(\"Unable to open the", "given exception instance. msg: Short description of the action that", "as string header: Initial message see: L{SetContent(), SetContentEI()\"\"\" details =", "= _('Filename: %s') % inst.filename if inst.errno is not None", "occurred')): \"\"\"Format given exception and add details to dialog. 
exc_type:", "(Exception instance) see: SetContentEI()\"\"\" if self._disabled: return exc_type = exc.__class__.__name__", "dialog.ShowModal() dialog.Destroy() def ShowEnvironmentError(msg, inst): \"\"\"Show EnvironmentError exceptions detailed and", "def ShowModal(self, **kwargs): if getattr(sys, '_called_from_test', False): return wx.ID_OK super(BugReport,", "instance) see: SetContentEI()\"\"\" if self._disabled: return exc_type = exc.__class__.__name__ exc_msg", "msg: Short description of the exception see: SetContent()\"\"\" if self._disabled:", "show the given exception details. exc_type: Exception type exc_value: Exception", "not None and inst.strerror is not None: details['error'] = '%s", "exc_value, exc_tb, msg=None): \"\"\"Wrapper for creating a L{BugReport} dialog and", "content. msg: Short description of the action that has raised", "exc_value, exc_tb, msg=_('An internal error occurred')): \"\"\"Format given exception and", "exc): \"\"\"Prepare given exception information and show it as dialog", "msg, exc): \"\"\"Prepare given exception information and show it as", "if not exc_msg: exc_msg = _('No summary available') summary =", "'exc_msg':exc_msg } self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto = self.tc_howto_report.GetValue() howto =", "%(filename)s\"\"\") % details wx.MessageBox(text, _('Error'), wx.OK | wx.CENTRE | wx.ICON_ERROR)", "exc_msg = _('No summary available') summary = self.st_summary.GetLabel() % {", "wx.MessageBox(\"Unable to open the clipboard\", \"Error\") def ShowModal(self, **kwargs): if", "None: details['error'] = '%s - %s' % (inst.errno, inst.strerror) else:", "Error type: %(type)s Error code: %(error)s %(filename)s\"\"\") % details wx.MessageBox(text,", "def SetContentEI(self, exc_type, exc_value, exc_tb, msg=_('An internal error occurred')): \"\"\"Format", "it overwrites exc_info with 1 logging.error(msg, exc_info=(exc_type, exc_value, exc_tb)) self._fill_dialog(msg,", 
"exc_tb, msg) dialog.ShowModal() dialog.Destroy() def ShowEnvironmentError(msg, inst): \"\"\"Show EnvironmentError exceptions", "ShowEnvironmentError(msg, inst): \"\"\"Show EnvironmentError exceptions detailed and user-friendly msg: Error", "of internal errors. @copyright: 2014-2016 <NAME> @copyright: 2017 <NAME> @license:", "else: details['error'] = str(inst.args) text = _(\"\"\"%(msg)s Error type: %(type)s", "False # Flag to prevent dialog popups during test runs.", "self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto = self.tc_howto_report.GetValue() howto = howto % {'log_file':", "popups during test runs. def __init__(self): self._disabled = getattr(sys, '_called_from_test',", "exception\"\"\" details = {'msg':msg, 'type':inst.__class__.__name__} if inst.filename: details['filename'] = _('Filename:", "exception (Exception instance) see: SetContentEI()\"\"\" if self._disabled: return exc_type =", "def Show(msg, exc): \"\"\"Wrapper for creating a L{BugReport} dialog and", "import sys import wx class BugReport(bugdialog_ui.UIBugDialog): \"Dialog to show details", "description of the exception see: L{Show(), BugReport.SetContent()\"\"\" dialog = BugReport()", "internal error occurred')) def _fill_dialog(self, exc_msg, exc_type, header): \"\"\"Fill the", "msg=_('An internal error occurred')): \"\"\"Format given exception and add details", "super(BugReport, self).ShowModal(**kwargs) def Show(msg, exc): \"\"\"Wrapper for creating a L{BugReport}", "<reponame>davidbrownell/Common_EnvironmentEx \"\"\"\\ Dialog to show details of internal errors. @copyright:", "occurred')) def _fill_dialog(self, exc_msg, exc_type, header): \"\"\"Fill the bug dialog", "and show it as dialog content. msg: Short description of", "is not None: details['error'] = '%s - %s' % (inst.errno,", "of the given exception instance. msg: Short description of the", "msg: Short description of the action that has raised this", "details of the given exception instance. 
msg: Short description of", "exc_msg: Short exception summary exc_type: Exception type as string header:", "exception() because it overwrites exc_info with 1 logging.error(msg, exc_info=(exc_type, exc_value,", "exc: Caught exception see ShowEI(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContent(msg,", "detailed and user-friendly msg: Error message inst: The caught exception\"\"\"", "self._disabled = getattr(sys, '_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self, None, -1, \"\") def", "the clipboard\", \"Error\") def ShowModal(self, **kwargs): if getattr(sys, '_called_from_test', False):", "Exception type exc_value: Exception value exc_tb: Exception traceback msg: Short", "inst.filename if inst.errno is not None and inst.strerror is not", "SetContentEI(self, exc_type, exc_value, exc_tb, msg=_('An internal error occurred')): \"\"\"Format given", "import logging import sys import wx class BugReport(bugdialog_ui.UIBugDialog): \"Dialog to", "= BugReport() dialog.SetContentEI(exc_type, exc_value, exc_tb, msg) dialog.ShowModal() dialog.Destroy() def ShowEnvironmentError(msg,", "bug dialog exc_msg: Short exception summary exc_type: Exception type as", "Show(msg, exc): \"\"\"Wrapper for creating a L{BugReport} dialog and show", "see: SetContentEI()\"\"\" if self._disabled: return exc_type = exc.__class__.__name__ exc_msg =", "(inst.errno, inst.strerror) else: details['error'] = str(inst.args) text = _(\"\"\"%(msg)s Error", "L{Show(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContentEI(exc_type, exc_value, exc_tb, msg) dialog.ShowModal()", "overwrites exc_info with 1 logging.error(msg, exc_info=(exc_type, exc_value, exc_tb)) self._fill_dialog(msg, exc_type,", "sys import wx class BugReport(bugdialog_ui.UIBugDialog): \"Dialog to show details of", "SetContent()\"\"\" if self._disabled: return # don't use exception() because it", "= str(inst.args) text = _(\"\"\"%(msg)s Error type: %(type)s Error code:", "as dialog content. 
msg: Short description of the action that", "if getattr(sys, '_called_from_test', False): return wx.ID_OK super(BugReport, self).ShowModal(**kwargs) def Show(msg,", "WARRANTY \"\"\" import bugdialog_ui import config import log import logging", "def ShowEI(exc_type, exc_value, exc_tb, msg=None): \"\"\"Wrapper for creating a L{BugReport}", "%(type)s Error code: %(error)s %(filename)s\"\"\") % details wx.MessageBox(text, _('Error'), wx.OK", "@license: MIT (see LICENSE.txt) - THIS PROGRAM COMES WITH NO", "BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContent(msg, exc) dialog.ShowModal() dialog.Destroy() def ShowEI(exc_type,", "howto % {'log_file': config.log_file} self.tc_howto_report.SetValue(howto) def OnCopy(self, event): \"Copy the", "} self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto = self.tc_howto_report.GetValue() howto = howto", "not exc_msg: exc_msg = _('No summary available') summary = self.st_summary.GetLabel()", "import bugdialog_ui import config import log import logging import sys", "_disabled = False # Flag to prevent dialog popups during", "exc.__class__.__name__ exc_msg = str(exc) header = self.st_header.GetLabel() % {'action': msg}", "inst.strerror) else: details['error'] = str(inst.args) text = _(\"\"\"%(msg)s Error type:", "exc_info=(exc_type, exc_value, exc_tb)) self._fill_dialog(msg, exc_type, _('An internal error occurred')) def", "to the clipboard\" text = self.tc_details.GetValue() if not text: return", "error occurred')) def _fill_dialog(self, exc_msg, exc_type, header): \"\"\"Fill the bug", "= _('No summary available') summary = self.st_summary.GetLabel() % { 'exc_type':exc_type,", "and inst.strerror is not None: details['error'] = '%s - %s'", "exc_value, exc_tb, msg) dialog.ShowModal() dialog.Destroy() def ShowEnvironmentError(msg, inst): \"\"\"Show EnvironmentError", "errors. 
@copyright: 2014-2016 <NAME> @copyright: 2017 <NAME> @license: MIT (see", "log.exception_orig(header) self._fill_dialog(exc_msg, exc_type, header) def SetContentEI(self, exc_type, exc_value, exc_tb, msg=_('An", "'_called_from_test', False): return wx.ID_OK super(BugReport, self).ShowModal(**kwargs) def Show(msg, exc): \"\"\"Wrapper", "not None: details['error'] = '%s - %s' % (inst.errno, inst.strerror)", "\"\"\"Show EnvironmentError exceptions detailed and user-friendly msg: Error message inst:", "type: %(type)s Error code: %(error)s %(filename)s\"\"\") % details wx.MessageBox(text, _('Error'),", "ShowEI(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContent(msg, exc) dialog.ShowModal() dialog.Destroy() def", "to prevent dialog popups during test runs. def __init__(self): self._disabled", "False): return wx.ID_OK super(BugReport, self).ShowModal(**kwargs) def Show(msg, exc): \"\"\"Wrapper for", "details to dialog. exc_type: Exception type exc_value: Exception value exc_tb:", "prevent dialog popups during test runs. def __init__(self): self._disabled =", "= {'msg':msg, 'type':inst.__class__.__name__} if inst.filename: details['filename'] = _('Filename: %s') %", "howto = self.tc_howto_report.GetValue() howto = howto % {'log_file': config.log_file} self.tc_howto_report.SetValue(howto)", "%s') % inst.filename if inst.errno is not None and inst.strerror", "show it as dialog content. msg: Short description of the", "Flag to prevent dialog popups during test runs. 
def __init__(self):", "= self.st_summary.GetLabel() % { 'exc_type':exc_type, 'exc_msg':exc_msg } self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details)", "Initial message see: L{SetContent(), SetContentEI()\"\"\" details = log.getBufferAsString() if not", "this error exc: Caught exception (Exception instance) see: SetContentEI()\"\"\" if", "SetContent(self, msg, exc): \"\"\"Prepare given exception information and show it", "msg: Error message inst: The caught exception\"\"\" details = {'msg':msg,", "\"\"\"Fill the bug dialog exc_msg: Short exception summary exc_type: Exception", "available') summary = self.st_summary.GetLabel() % { 'exc_type':exc_type, 'exc_msg':exc_msg } self.st_header.SetLabel(header)", "see: L{Show(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContentEI(exc_type, exc_value, exc_tb, msg)", "exception summary exc_type: Exception type as string header: Initial message", "Exception type as string header: Initial message see: L{SetContent(), SetContentEI()\"\"\"", "message inst: The caught exception\"\"\" details = {'msg':msg, 'type':inst.__class__.__name__} if", "{'action': msg} log.exception_orig(header) self._fill_dialog(exc_msg, exc_type, header) def SetContentEI(self, exc_type, exc_value,", "given exception information and show it as dialog content. 
msg:", "if self._disabled: return # don't use exception() because it overwrites", "the clipboard\" text = self.tc_details.GetValue() if not text: return data", "for creating a L{BugReport} dialog and show the given exception", "Error message inst: The caught exception\"\"\" details = {'msg':msg, 'type':inst.__class__.__name__}", "Caught exception see ShowEI(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContent(msg, exc)", "details = log.getBufferAsString() if not exc_msg: exc_msg = _('No summary", "dialog.Destroy() def ShowEnvironmentError(msg, inst): \"\"\"Show EnvironmentError exceptions detailed and user-friendly", "dialog.Destroy() def ShowEI(exc_type, exc_value, exc_tb, msg=None): \"\"\"Wrapper for creating a", "see ShowEI(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContent(msg, exc) dialog.ShowModal() dialog.Destroy()", "import log import logging import sys import wx class BugReport(bugdialog_ui.UIBugDialog):", "bugdialog_ui.UIBugDialog.__init__(self, None, -1, \"\") def SetContent(self, msg, exc): \"\"\"Prepare given", "<NAME> @license: MIT (see LICENSE.txt) - THIS PROGRAM COMES WITH", "logging.error(msg, exc_info=(exc_type, exc_value, exc_tb)) self._fill_dialog(msg, exc_type, _('An internal error occurred'))", "SetContentEI()\"\"\" if self._disabled: return exc_type = exc.__class__.__name__ exc_msg = str(exc)", "def __init__(self): self._disabled = getattr(sys, '_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self, None, -1,", "% { 'exc_type':exc_type, 'exc_msg':exc_msg } self.st_header.SetLabel(header) self.st_summary.SetLabel(summary) self.tc_details.SetValue(details) howto =", "the exception see: L{Show(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContentEI(exc_type, exc_value,", "details['filename'] = _('Filename: %s') % inst.filename if inst.errno is not", "traceback msg: Short description of the exception see: L{Show(), BugReport.SetContent()\"\"\"", "(see LICENSE.txt) - THIS PROGRAM 
COMES WITH NO WARRANTY \"\"\"", "summary exc_type: Exception type as string header: Initial message see:", "not text: return data = wx.TextDataObject(text) if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close()", "import wx class BugReport(bugdialog_ui.UIBugDialog): \"Dialog to show details of internal", "message see: L{SetContent(), SetContentEI()\"\"\" details = log.getBufferAsString() if not exc_msg:", "the given exception details. exc_type: Exception type exc_value: Exception value", "else: wx.MessageBox(\"Unable to open the clipboard\", \"Error\") def ShowModal(self, **kwargs):", "None, -1, \"\") def SetContent(self, msg, exc): \"\"\"Prepare given exception", "dialog. exc_type: Exception type exc_value: Exception value exc_tb: Exception traceback", "use exception() because it overwrites exc_info with 1 logging.error(msg, exc_info=(exc_type,", "% inst.filename if inst.errno is not None and inst.strerror is", "Short description of the action that has raised this error", "exception and add details to dialog. 
exc_type: Exception type exc_value:", "BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContentEI(exc_type, exc_value, exc_tb, msg) dialog.ShowModal() dialog.Destroy()", "Caught exception (Exception instance) see: SetContentEI()\"\"\" if self._disabled: return exc_type", "dialog.SetContent(msg, exc) dialog.ShowModal() dialog.Destroy() def ShowEI(exc_type, exc_value, exc_tb, msg=None): \"\"\"Wrapper", "= getattr(sys, '_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self, None, -1, \"\") def SetContent(self,", "_('An internal error occurred')) def _fill_dialog(self, exc_msg, exc_type, header): \"\"\"Fill", "**kwargs): if getattr(sys, '_called_from_test', False): return wx.ID_OK super(BugReport, self).ShowModal(**kwargs) def", "'%s - %s' % (inst.errno, inst.strerror) else: details['error'] = str(inst.args)", "text = _(\"\"\"%(msg)s Error type: %(type)s Error code: %(error)s %(filename)s\"\"\")", "has raised this error exc: Caught exception see ShowEI(), BugReport.SetContent()\"\"\"", "see: L{SetContent(), SetContentEI()\"\"\" details = log.getBufferAsString() if not exc_msg: exc_msg", "header = self.st_header.GetLabel() % {'action': msg} log.exception_orig(header) self._fill_dialog(exc_msg, exc_type, header)", "given exception and add details to dialog. exc_type: Exception type", "and add details to dialog. 
exc_type: Exception type exc_value: Exception", "of the action that has raised this error exc: Caught", "self.tc_details.SetValue(details) howto = self.tc_howto_report.GetValue() howto = howto % {'log_file': config.log_file}", "header: Initial message see: L{SetContent(), SetContentEI()\"\"\" details = log.getBufferAsString() if", "Exception traceback msg: Short description of the exception see: L{Show(),", "msg=None): \"\"\"Wrapper for creating a L{BugReport} dialog and show the", "exc_type, header) def SetContentEI(self, exc_type, exc_value, exc_tb, msg=_('An internal error", "dialog = BugReport() dialog.SetContent(msg, exc) dialog.ShowModal() dialog.Destroy() def ShowEI(exc_type, exc_value,", "event): \"Copy the dialog content to the clipboard\" text =", "MIT (see LICENSE.txt) - THIS PROGRAM COMES WITH NO WARRANTY", "_(\"\"\"%(msg)s Error type: %(type)s Error code: %(error)s %(filename)s\"\"\") % details", "inst): \"\"\"Show EnvironmentError exceptions detailed and user-friendly msg: Error message", "test runs. def __init__(self): self._disabled = getattr(sys, '_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self,", "internal errors\" _disabled = False # Flag to prevent dialog", "self._disabled: return exc_type = exc.__class__.__name__ exc_msg = str(exc) header =", "show the details of the given exception instance. msg: Short", "raised this error exc: Caught exception (Exception instance) see: SetContentEI()\"\"\"", "details['error'] = '%s - %s' % (inst.errno, inst.strerror) else: details['error']", "2014-2016 <NAME> @copyright: 2017 <NAME> @license: MIT (see LICENSE.txt) -", "bugdialog_ui import config import log import logging import sys import", "it as dialog content. 
msg: Short description of the action", "self).ShowModal(**kwargs) def Show(msg, exc): \"\"\"Wrapper for creating a L{BugReport} dialog", "-1, \"\") def SetContent(self, msg, exc): \"\"\"Prepare given exception information", "summary = self.st_summary.GetLabel() % { 'exc_type':exc_type, 'exc_msg':exc_msg } self.st_header.SetLabel(header) self.st_summary.SetLabel(summary)", "inst.filename: details['filename'] = _('Filename: %s') % inst.filename if inst.errno is", "- %s' % (inst.errno, inst.strerror) else: details['error'] = str(inst.args) text", "getattr(sys, '_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self, None, -1, \"\") def SetContent(self, msg,", "Error code: %(error)s %(filename)s\"\"\") % details wx.MessageBox(text, _('Error'), wx.OK |", "exc_tb, msg=None): \"\"\"Wrapper for creating a L{BugReport} dialog and show", "exc: Caught exception (Exception instance) see: SetContentEI()\"\"\" if self._disabled: return", "of the exception see: L{Show(), BugReport.SetContent()\"\"\" dialog = BugReport() dialog.SetContentEI(exc_type,", "ShowEI(exc_type, exc_value, exc_tb, msg=None): \"\"\"Wrapper for creating a L{BugReport} dialog", "self._disabled: return # don't use exception() because it overwrites exc_info", "_('Filename: %s') % inst.filename if inst.errno is not None and", "def _fill_dialog(self, exc_msg, exc_type, header): \"\"\"Fill the bug dialog exc_msg:", "'_called_from_test', False) bugdialog_ui.UIBugDialog.__init__(self, None, -1, \"\") def SetContent(self, msg, exc):", "= _(\"\"\"%(msg)s Error type: %(type)s Error code: %(error)s %(filename)s\"\"\") %", "wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox(\"Unable to open the clipboard\", \"Error\")", "_fill_dialog(self, exc_msg, exc_type, header): \"\"\"Fill the bug dialog exc_msg: Short", "@copyright: 2014-2016 <NAME> @copyright: 2017 <NAME> @license: MIT (see LICENSE.txt)", "\"\"\" import bugdialog_ui import config import log import logging 
import", "self.tc_howto_report.SetValue(howto) def OnCopy(self, event): \"Copy the dialog content to the", "BugReport() dialog.SetContent(msg, exc) dialog.ShowModal() dialog.Destroy() def ShowEI(exc_type, exc_value, exc_tb, msg=None):", "to show details of internal errors\" _disabled = False #", "that has raised this error exc: Caught exception see ShowEI(),", "exception instance. msg: Short description of the action that has", "_('No summary available') summary = self.st_summary.GetLabel() % { 'exc_type':exc_type, 'exc_msg':exc_msg", "text: return data = wx.TextDataObject(text) if wx.TheClipboard.Open(): wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else:", "if self._disabled: return exc_type = exc.__class__.__name__ exc_msg = str(exc) header", "exc_tb, msg=_('An internal error occurred')): \"\"\"Format given exception and add", "def ShowEnvironmentError(msg, inst): \"\"\"Show EnvironmentError exceptions detailed and user-friendly msg:", "% {'log_file': config.log_file} self.tc_howto_report.SetValue(howto) def OnCopy(self, event): \"Copy the dialog", "exception information and show it as dialog content. msg: Short", "to open the clipboard\", \"Error\") def ShowModal(self, **kwargs): if getattr(sys,", "msg: Short description of the exception see: L{Show(), BugReport.SetContent()\"\"\" dialog", "str(exc) header = self.st_header.GetLabel() % {'action': msg} log.exception_orig(header) self._fill_dialog(exc_msg, exc_type,", "the dialog content to the clipboard\" text = self.tc_details.GetValue() if", "EnvironmentError exceptions detailed and user-friendly msg: Error message inst: The", "= False # Flag to prevent dialog popups during test", "to show details of internal errors. @copyright: 2014-2016 <NAME> @copyright:", "if inst.filename: details['filename'] = _('Filename: %s') % inst.filename if inst.errno", "log import logging import sys import wx class BugReport(bugdialog_ui.UIBugDialog): \"Dialog" ]
[ "email_from, recipient_list ) mail.send() c=1 m=\"your password is changed succesfully\"", "= 'QRcode scanner for license Forget password' message = \"Password", "document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8):", "3) id =obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\", frame) key = cv2.waitKey(10) &", "msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>)", "to your email\" elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your email or password is", "0, 0), 3) id =obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\", frame) key =", "fpdf import FPDF pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request, 'simple_upload.html', {", "obj.polygon (x,y,w,h) = obj.rect pts = np.array(points, np.int32) pts =", "from django.core.files.storage import FileSystemStorage,default_storage from django.core.mail import send_mail, EmailMessage from", "'POST': form = DocumentForm(request.POST, request.FILES,request.POST) if form.is_valid(): form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count()", "\"+request.POST.get(\"pswd\") email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ] mail=EmailMessage( subject,", "mail=EmailMessage( subject, message, email_from, recipient_list ) mail.send() c=1 m=\"your password", "t=p[1] print(t) subject = 'QRcode scanner for license' message =", "in request.POST and request.method==\"POST\"): cap 
= cv2.VideoCapture(0+cv2.CAP_DSHOW) font = cv2.FONT_HERSHEY_PLAIN", "= fs.save(myfile.name, myfile) uploaded_file_url = fs.url(filename) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path", "pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request, 'simple_upload.html', { 'uploaded_file_url': uploaded_file_url }) return render(request,", "= obj.rect pts = np.array(points, np.int32) pts = pts.reshape((-1, 1,", "num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii')", "cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id}) if('proceed' in request.POST and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return", "font = cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while decodedObjects==[]: _, frame = cap.read()", "Document.objects.all() return render(request, 'home.html', { 'documents': documents }) \"\"\"def simple_upload(request):", "document_name=document_last[0] print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list)", "= DocumentForm() return render(request, 'model_form_upload.html', {'form': form,'msg':msg}) def mypass(request): m=\"\"", "email_from = settings.EMAIL_HOST_USER recipient_list = [email, ] mail=EmailMessage( subject, message,", "have change the password successfully\"}) else:\"\"\" c=0 if(user_data1): subject =", "for license' message = \"Password has succesfully changed\"+\" \"+request.POST.get(\"pswd\") email_from", "'''from fpdf import FPDF pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request, 
'simple_upload.html',", "messages import os import pyqrcode import png import random import", "successfully uploaded\" return redirect('model_form_upload') else: form = DocumentForm() return render(request,", "import messages import os import pyqrcode import png import random", "subject, message, email_from, recipient_list ) mail.send() c=1 m=\"your password is", "mail.send() msg=\"your qrcode is sent to your email\" elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0):", "np import pyzbar.pyzbar as pyzbar def home(request): documents= Document.objects.all() return", "= obj.polygon (x,y,w,h) = obj.rect pts = np.array(points, np.int32) pts", "succesfully and mail sent\" elif(user_data==0): msg=\"your email is incorrect or", "50), font, 2, (255, 0, 0), 3) id =obj.data.decode(\"utf-8\") cv2.imshow(\"QR", "subject = 'QRcode scanner for license' message = \"Password has", "subject, message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send()", "'simple_upload.html', { 'uploaded_file_url': uploaded_file_url }) return render(request, 'simple_upload.html')\"\"\" def model_form_upload(request):", "import pyzbar.pyzbar as pyzbar def home(request): documents= Document.objects.all() return render(request,", "& 0xFF if decodedObjects!=[] : cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id}) if('proceed' in", "0xFF if decodedObjects!=[] : cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id}) if('proceed' in request.POST", "fs.save(myfile.name, myfile) uploaded_file_url = fs.url(filename) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path =", "cv2.polylines(frame, [pts], True, (0, 255, 0), 3) cv2.putText(frame, str(obj.data), (50,", "filename = fs.save(myfile.name, myfile) 
uploaded_file_url = fs.url(filename) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))", "import pyqrcode import png import random import base64 import cv2", "pts = np.array(points, np.int32) pts = pts.reshape((-1, 1, 2)) cv2.polylines(frame,", "request.POST and request.method==\"POST\"): cap = cv2.VideoCapture(0+cv2.CAP_DSHOW) font = cv2.FONT_HERSHEY_PLAIN decodedObjects=[]", "= os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k) mail.send() msg=\"your qrcode is sent", "user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you have change the password successfully\"})", "random import base64 import cv2 import numpy as np import", "frame = cap.read() decodedObjects = pyzbar.decode(frame) for obj in decodedObjects:", "full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6) '''from fpdf import FPDF pdf=FPDF()", "import render, redirect from django.conf import settings from django.core.files.storage import", "incorrect\" else: m=\"\" print(m) return render(request,'mypass.html',{\"m\":m}) def user_req(request): if(\"scanner\" in", "scanner for license' message = password1 email_from = settings.EMAIL_HOST_USER recipient_list", "mail.send() msg=\"your successfully uploaded\" return redirect('model_form_upload') else: form = DocumentForm()", "cv2 import numpy as np import pyzbar.pyzbar as pyzbar def", "email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k) mail.send()", "else:\"\"\" c=0 if(user_data1): subject = 'QRcode scanner for license' message", "not found\" return 
render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request): msg=\"\" if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))):", "render(request, 'simple_upload.html')\"\"\" def model_form_upload(request): id=\"\" msg=\"\" if request.method == 'POST':", "settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ] mail=EmailMessage( subject, message, email_from, recipient_list", "else: m=\"\" print(m) return render(request,'mypass.html',{\"m\":m}) def user_req(request): if(\"scanner\" in request.POST", "import Document from core.forms import DocumentForm from django.contrib import messages", "t.save() qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject", "return render(request, 'simple_upload.html')\"\"\" def model_form_upload(request): id=\"\" msg=\"\" if request.method ==", "t.file_url=ax print(ax) t.save() qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type)", "m=\"\" print(m) return render(request,'mypass.html',{\"m\":m}) def user_req(request): if(\"scanner\" in request.POST and", "print(t) subject = 'QRcode scanner for license' message = \"resend\"", "your email\" elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your email or password is incorrect\"", "document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax print(ax) t.save() 
qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\"", "email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ] mail=EmailMessage( subject, message,", "fs = FileSystemStorage() filename = fs.save(myfile.name, myfile) uploaded_file_url = fs.url(filename)", "scanner for license' message = \"Password has succesfully changed\"+\" \"+request.POST.get(\"pswd\")", "while decodedObjects==[]: _, frame = cap.read() decodedObjects = pyzbar.decode(frame) for", "filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6) '''from fpdf import FPDF pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)'''", "password' message = \"Password has succesfully changed\"+\" \"+<PASSWORD> email_from =", "password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject = 'QRcode scanner for license Forget password'", ": cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id}) if('proceed' in request.POST and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\"))", "message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your", "qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject =", "return render(request,'mypass.html',{u:\"you have change the password successfully\"}) else:\"\"\" c=0 if(user_data1):", "settings from django.core.files.storage import 
FileSystemStorage,default_storage from django.core.mail import send_mail, EmailMessage", "FileSystemStorage,default_storage from django.core.mail import send_mail, EmailMessage from core.models import Document", "range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax print(ax) t.save()", "decodedObjects: points = obj.polygon (x,y,w,h) = obj.rect pts = np.array(points,", "has succesfully changed\"+\" \"+<PASSWORD> email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"),", "'documents': documents }) \"\"\"def simple_upload(request): if request.method == 'POST' and", "mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t)", "1, 2)) cv2.polylines(frame, [pts], True, (0, 255, 0), 3) cv2.putText(frame,", "cv2.putText(frame, str(obj.data), (50, 50), font, 2, (255, 0, 0), 3)", "user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/') print(p) t=p[1] print(t) subject = 'QRcode scanner", "decodedObjects!=[] : cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id}) if('proceed' in request.POST and request.method==\"POST\"):", "'simple_upload.html')\"\"\" def model_form_upload(request): id=\"\" msg=\"\" if request.method == 'POST': form", "render(request,\"user_req.html\",) def user(request): return render(request,\"user.html\",) def forget_pass(request): msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9']", "\"+<PASSWORD> email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ] mail=EmailMessage( subject,", "return render(request, 'simple_upload.html', { 
'uploaded_file_url': uploaded_file_url }) return render(request, 'simple_upload.html')\"\"\"", "form = DocumentForm() return render(request, 'model_form_upload.html', {'form': form,'msg':msg}) def mypass(request):", "and mail sent\" elif(user_data==0): msg=\"your email is incorrect or not", "FPDF pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request, 'simple_upload.html', { 'uploaded_file_url': uploaded_file_url", "elif(user_data==0): msg=\"your email is incorrect or not found\" return render(request,\"forget_pass.html\",{\"msg\":msg})", "qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject = 'QRcode scanner for license'", "mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2))", "user_req(request): if(\"scanner\" in request.POST and request.method==\"POST\"): cap = cv2.VideoCapture(0+cv2.CAP_DSHOW) font", "import settings from django.core.files.storage import FileSystemStorage,default_storage from django.core.mail import send_mail,", "= cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while decodedObjects==[]: _, frame = cap.read() decodedObjects", "'uploaded_file_url': uploaded_file_url }) return render(request, 'simple_upload.html')\"\"\" def model_form_upload(request): id=\"\" msg=\"\"", "home(request): documents= Document.objects.all() return render(request, 'home.html', { 'documents': documents })", "myfile = request.FILES['myfile'] fs = FileSystemStorage() filename = fs.save(myfile.name, myfile)", "recipient_list = [request.POST.get('email'),] mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR", "subject = 'QRcode scanner for license Forget password' message =", "= os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path = 
os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6)", "str(obj.data), (50, 50), font, 2, (255, 0, 0), 3) id", "user(request): return render(request,\"user.html\",) def forget_pass(request): msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for", "png import random import base64 import cv2 import numpy as", "s=qr.png(filename1,scale=6) '''from fpdf import FPDF pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request,", "message = \"resend\" email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get('email'),] mail=EmailMessage(", "mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you have", "cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while decodedObjects==[]: _, frame = cap.read() decodedObjects =", "decodedObjects=[] while decodedObjects==[]: _, frame = cap.read() decodedObjects = pyzbar.decode(frame)", "incorrect or not found\" return render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request): msg=\"\" if(request.method=='POST'", "if request.method == 'POST': form = DocumentForm(request.POST, request.FILES,request.POST) if form.is_valid():", "pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request, 'simple_upload.html', { 'uploaded_file_url': uploaded_file_url }) return", "documents= 
Document.objects.all() return render(request, 'home.html', { 'documents': documents }) \"\"\"def", "request.method == 'POST': form = DocumentForm(request.POST, request.FILES,request.POST) if form.is_valid(): form.save()", "'QRcode scanner for license' message = \"Password has succesfully changed\"+\"", "\"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you have change the password successfully\"}) else:\"\"\"", "if(user_data>0): msg=\"your password is changed succesfully and mail sent\" elif(user_data==0):", "{ 'uploaded_file_url': uploaded_file_url }) return render(request, 'simple_upload.html')\"\"\" def model_form_upload(request): id=\"\"", "license' message = password1 email_from = settings.EMAIL_HOST_USER recipient_list = [email,", "if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you have change the", "sent\" elif(user_data==0): msg=\"your email is incorrect or not found\" return", "password successfully\"}) else:\"\"\" c=0 if(user_data1): subject = 'QRcode scanner for", "and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",) def user(request): return", "mail.send() if(user_data>0): msg=\"your password is changed succesfully and mail sent\"", "print(type(document_name)) document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax print(ax) 
t.save() qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\")", ") mail.send() if(user_data>0): msg=\"your password is changed succesfully and mail", "= 'QRcode scanner for license' message = password1 email_from =", "= np.array(points, np.int32) pts = pts.reshape((-1, 1, 2)) cv2.polylines(frame, [pts],", "def forget_pass(request): msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8):", "c=0 if(user_data1): subject = 'QRcode scanner for license' message =", "#mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject = 'QRcode scanner for license' message =", "msg=\"your email is incorrect or not found\" return render(request,\"forget_pass.html\",{\"msg\":msg}) def", "subject = 'QRcode scanner for license' message = \"resend\" email_from", "\"Password has succesfully changed\"+\" \"+<PASSWORD> email_from = settings.EMAIL_HOST_USER recipient_list =", "if form.is_valid(): form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9']", "import cv2 import numpy as np import pyzbar.pyzbar as pyzbar", "= os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your successfully uploaded\" return redirect('model_form_upload') else:", "message = \"Password has succesfully changed\"+\" \"+request.POST.get(\"pswd\") email_from = settings.EMAIL_HOST_USER", "cv2.imshow(\"QR Reader\", frame) key = cv2.waitKey(10) & 0xFF if decodedObjects!=[]", "mail sent\" elif(user_data==0): msg=\"your email is incorrect or not found\"", "recipient_list = [email, ] mail=EmailMessage( subject, message, email_from, recipient_list )", "FileSystemStorage() filename 
= fs.save(myfile.name, myfile) uploaded_file_url = fs.url(filename) BASE_DIR =", "import DocumentForm from django.contrib import messages import os import pyqrcode", "recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your successfully uploaded\"", "recipient_list = [request.POST.get(\"email\"), ] mail=EmailMessage( subject, message, email_from, recipient_list )", "pyzbar.pyzbar as pyzbar def home(request): documents= Document.objects.all() return render(request, 'home.html',", "] mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))", "qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6) '''from fpdf import FPDF pdf=FPDF() pdf.add_page()", "= \"Password has succesfully changed\"+\" \"+<PASSWORD> email_from = settings.EMAIL_HOST_USER recipient_list", "np.int32) pts = pts.reshape((-1, 1, 2)) cv2.polylines(frame, [pts], True, (0,", "[request.POST.get('email'),] mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))", "<filename>core/views.py from django.shortcuts import render, redirect from django.conf import settings", "request.FILES['myfile'] fs = FileSystemStorage() filename = fs.save(myfile.name, myfile) uploaded_file_url =", "request.method==\"POST\"): cap = cv2.VideoCapture(0+cv2.CAP_DSHOW) font = cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while decodedObjects==[]:", "user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject = 'QRcode scanner for license Forget password' message", "mail.send()\"\"\" subject = 'QRcode scanner for license' message = password1", "settings.EMAIL_HOST_USER recipient_list = [request.POST.get('email'),] mail=EmailMessage( subject, 
message, email_from, recipient_list )", "filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject = 'QRcode scanner", "in decodedObjects: points = obj.polygon (x,y,w,h) = obj.rect pts =", "def model_form_upload(request): id=\"\" msg=\"\" if request.method == 'POST': form =", "font, 2, (255, 0, 0), 3) id =obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\",", "= request.FILES['myfile'] fs = FileSystemStorage() filename = fs.save(myfile.name, myfile) uploaded_file_url", "\"Password has succesfully changed\"+\" \"+request.POST.get(\"pswd\") email_from = settings.EMAIL_HOST_USER recipient_list =", "key = cv2.waitKey(10) & 0xFF if decodedObjects!=[] : cv2.destroyAllWindows() return", "True, (0, 255, 0), 3) cv2.putText(frame, str(obj.data), (50, 50), font,", "def mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you", "DocumentForm from django.contrib import messages import os import pyqrcode import", "import numpy as np import pyzbar.pyzbar as pyzbar def home(request):", "uploaded_file_url }) return render(request, 'simple_upload.html')\"\"\" def model_form_upload(request): id=\"\" msg=\"\" if", "render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",) def user(request): return render(request,\"user.html\",) def forget_pass(request): msg=\"\"", "changed\"+\" \"+request.POST.get(\"pswd\") email_from = settings.EMAIL_HOST_USER recipient_list = 
[request.POST.get(\"email\"), ] mail=EmailMessage(", "the password successfully\"}) else:\"\"\" c=0 if(user_data1): subject = 'QRcode scanner", "and request.FILES['myfile']: myfile = request.FILES['myfile'] fs = FileSystemStorage() filename =", "return render(request, 'model_form_upload.html', {'form': form,'msg':msg}) def mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\"))", "os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your successfully uploaded\" return redirect('model_form_upload') else: form", "has succesfully changed\"+\" \"+request.POST.get(\"pswd\") email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"),", "2)) cv2.polylines(frame, [pts], True, (0, 255, 0), 3) cv2.putText(frame, str(obj.data),", "from django.contrib import messages import os import pyqrcode import png", "changed\"+\" \"+<PASSWORD> email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ] mail=EmailMessage(", "np.array(points, np.int32) pts = pts.reshape((-1, 1, 2)) cv2.polylines(frame, [pts], True,", "os import pyqrcode import png import random import base64 import", "render, redirect from django.conf import settings from django.core.files.storage import FileSystemStorage,default_storage", "core.forms import DocumentForm from django.contrib import messages import os import", "obj.rect pts = np.array(points, np.int32) pts = pts.reshape((-1, 1, 2))", "cap.read() decodedObjects = pyzbar.decode(frame) for obj in decodedObjects: points =", "c=1 m=\"your password is changed succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"): m=\"your", "render(request,\"user.html\",) def 
forget_pass(request): msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in", "if(\"scanner\" in request.POST and request.method==\"POST\"): cap = cv2.VideoCapture(0+cv2.CAP_DSHOW) font =", "print(ax) t.save() qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\"", "recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k) mail.send() msg=\"your", "os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path = os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6) '''from", "in range(0,8): password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject = 'QRcode scanner for license", "pyzbar def home(request): documents= Document.objects.all() return render(request, 'home.html', { 'documents':", "send_mail, EmailMessage from core.models import Document from core.forms import DocumentForm", "email or password is incorrect\" else: m=\"\" print(m) return render(request,'mypass.html',{\"m\":m})", "_, frame = cap.read() decodedObjects = pyzbar.decode(frame) for obj in", "request.FILES,request.POST) if form.is_valid(): form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email) t=Document.objects.last()", "for obj in decodedObjects: points = obj.polygon (x,y,w,h) = obj.rect", "scanner for license Forget 
password' message = \"Password has succesfully", "cap = cv2.VideoCapture(0+cv2.CAP_DSHOW) font = cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while decodedObjects==[]: _,", "0), 3) id =obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\", frame) key = cv2.waitKey(10)", "== 'POST' and request.FILES['myfile']: myfile = request.FILES['myfile'] fs = FileSystemStorage()", "DocumentForm(request.POST, request.FILES,request.POST) if form.is_valid(): form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email)", "redirect from django.conf import settings from django.core.files.storage import FileSystemStorage,default_storage from", "and request.method==\"POST\"): cap = cv2.VideoCapture(0+cv2.CAP_DSHOW) font = cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while", "request.method==\"POST\"): m=\"your email or password is incorrect\" else: m=\"\" print(m)", "mail.attach_file(k) mail.send() msg=\"your qrcode is sent to your email\" elif(request.method=='POST'and", "as pyzbar def home(request): documents= Document.objects.all() return render(request, 'home.html', {", "DocumentForm() return render(request, 'model_form_upload.html', {'form': form,'msg':msg}) def mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")):", "return render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request): msg=\"\" if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0]", "media_path = os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" 
s=qr.png(filename1,scale=6) '''from fpdf", "m=\"your password is changed succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"): m=\"your email", "django.core.mail import send_mail, EmailMessage from core.models import Document from core.forms", "form.is_valid(): form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\"", "=obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\", frame) key = cv2.waitKey(10) & 0xFF if", "core.models import Document from core.forms import DocumentForm from django.contrib import", "'QRcode scanner for license' message = password1 email_from = settings.EMAIL_HOST_USER", "license Forget password' message = \"Password has succesfully changed\"+\" \"+<PASSWORD>", "msg=\"\" if request.method == 'POST': form = DocumentForm(request.POST, request.FILES,request.POST) if", "= DocumentForm(request.POST, request.FILES,request.POST) if form.is_valid(): form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0]", "and request.method==\"POST\"): m=\"your email or password is incorrect\" else: m=\"\"", "successfully\"}) else:\"\"\" c=0 if(user_data1): subject = 'QRcode scanner for license'", "print(p) t=p[1] print(t) subject = 'QRcode scanner for license' message", "== 'POST': form = DocumentForm(request.POST, request.FILES,request.POST) if form.is_valid(): form.save() email=form.cleaned_data['Email']", "elif(request.method=='POST'and 
Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your email or password is incorrect\" return render(request,'qrcode_miss.html',{\"msg\":msg})", "(255, 0, 0), 3) id =obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\", frame) key", "password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax print(ax) t.save() qr=pyqrcode.create(ax)", "email is incorrect or not found\" return render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request):", "is changed succesfully and mail sent\" elif(user_data==0): msg=\"your email is", "msg=\"your qrcode is sent to your email\" elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your", "qrcode_miss(request): msg=\"\" if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/') print(p) t=p[1]", "id =obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\", frame) key = cv2.waitKey(10) & 0xFF", "'QRcode scanner for license' message = \"resend\" email_from = settings.EMAIL_HOST_USER", "is sent to your email\" elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your email or", "email\" elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your email or password is incorrect\" return", 
"'QRcode scanner for license Forget password' message = \"Password has", "id=\"\" msg=\"\" if request.method == 'POST': form = DocumentForm(request.POST, request.FILES,request.POST)", "or not found\" return render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request): msg=\"\" if(request.method=='POST' and", "password is changed succesfully and mail sent\" elif(user_data==0): msg=\"your email", "from django.core.mail import send_mail, EmailMessage from core.models import Document from", "Document from core.forms import DocumentForm from django.contrib import messages import", "uploaded\" return redirect('model_form_upload') else: form = DocumentForm() return render(request, 'model_form_upload.html',", "message = \"Password has succesfully changed\"+\" \"+<PASSWORD> email_from = settings.EMAIL_HOST_USER", "if('proceed' in request.POST and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",)", "range(0,8): password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject = 'QRcode scanner for license Forget", "fs.url(filename) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path = os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\")", "in request.POST and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",) def", "is incorrect or not found\" return render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request): msg=\"\"", "django.conf import settings 
from django.core.files.storage import FileSystemStorage,default_storage from django.core.mail import", "return render(request,\"user_req.html\",{\"id\":id}) if('proceed' in request.POST and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return render(request,\"user_req.html\",{\"userdata\":userdata})", "import base64 import cv2 import numpy as np import pyzbar.pyzbar", "settings.EMAIL_HOST_USER recipient_list = [email, ] mail=EmailMessage( subject, message, email_from, recipient_list", "] mail=EmailMessage( subject, message, email_from, recipient_list ) mail.send() if(user_data>0): msg=\"your", "frame) key = cv2.waitKey(10) & 0xFF if decodedObjects!=[] : cv2.destroyAllWindows()", "scanner for license' message = \"resend\" email_from = settings.EMAIL_HOST_USER recipient_list", "email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your successfully", "from core.models import Document from core.forms import DocumentForm from django.contrib", "EmailMessage from core.models import Document from core.forms import DocumentForm from", "mail.send() c=1 m=\"your password is changed succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"):", "import random import base64 import cv2 import numpy as np", "pyzbar.decode(frame) for obj in decodedObjects: points = obj.polygon (x,y,w,h) =", "password1=\"\" for i in range(0,8): password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject = 'QRcode", "= \"Password has succesfully changed\"+\" \"+request.POST.get(\"pswd\") email_from = settings.EMAIL_HOST_USER recipient_list", ") BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 
k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k) mail.send() msg=\"your qrcode", "render(request,'mypass.html',{u:\"you have change the password successfully\"}) else:\"\"\" c=0 if(user_data1): subject", "pts = pts.reshape((-1, 1, 2)) cv2.polylines(frame, [pts], True, (0, 255,", "return render(request, 'home.html', { 'documents': documents }) \"\"\"def simple_upload(request): if", "return render(request,\"user.html\",) def forget_pass(request): msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i", "qrcode is sent to your email\" elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your email", "changed succesfully and mail sent\" elif(user_data==0): msg=\"your email is incorrect", "form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for", "= os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6) '''from fpdf import", "license' message = \"resend\" email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get('email'),]", "for i in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1]", "print(m) return render(request,'mypass.html',{\"m\":m}) def user_req(request): if(\"scanner\" in request.POST and request.method==\"POST\"):", "numpy as np import pyzbar.pyzbar as pyzbar def 
home(request): documents=", "print(k) mail.attach_file(k) mail.send() msg=\"your qrcode is sent to your email\"", "= [email, ] mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR", "num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject =", "if(user_data1): subject = 'QRcode scanner for license' message = \"Password", "for i in range(0,8): password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject = 'QRcode scanner", "django.core.files.storage import FileSystemStorage,default_storage from django.core.mail import send_mail, EmailMessage from core.models", "succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"): m=\"your email or password is incorrect\"", "= settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ] mail=EmailMessage( subject, message, email_from,", "'model_form_upload.html', {'form': form,'msg':msg}) def mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1):", "for license' message = password1 email_from = settings.EMAIL_HOST_USER recipient_list =", "myfile) uploaded_file_url = fs.url(filename) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path = os.path.join(BASE_DIR,'media')", "mail=EmailMessage( subject, message, email_from, recipient_list ) mail.send() if(user_data>0): msg=\"your password", "i in range(0,8): 
password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject = 'QRcode scanner for", "os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k) mail.send() msg=\"your qrcode is sent to", "= settings.EMAIL_HOST_USER recipient_list = [email, ] mail=EmailMessage( subject, message, email_from,", "= 'QRcode scanner for license' message = \"resend\" email_from =", "= cv2.VideoCapture(0+cv2.CAP_DSHOW) font = cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while decodedObjects==[]: _, frame", "255, 0), 3) cv2.putText(frame, str(obj.data), (50, 50), font, 2, (255,", "3) cv2.putText(frame, str(obj.data), (50, 50), font, 2, (255, 0, 0),", "password is incorrect\" else: m=\"\" print(m) return render(request,'mypass.html',{\"m\":m}) def user_req(request):", "render(request, 'model_form_upload.html', {'form': form,'msg':msg}) def mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\"))", "license' message = \"Password has succesfully changed\"+\" \"+request.POST.get(\"pswd\") email_from =", "and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/') print(p) t=p[1] print(t) subject =", "redirect('model_form_upload') else: form = DocumentForm() return render(request, 'model_form_upload.html', {'form': form,'msg':msg})", "[request.POST.get(\"email\"), ] mail=EmailMessage( subject, message, email_from, recipient_list ) mail.send() c=1", "if decodedObjects!=[] : 
cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id}) if('proceed' in request.POST and", "{ 'documents': documents }) \"\"\"def simple_upload(request): if request.method == 'POST'", "document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in", "BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path = os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\"", "forget_pass(request): msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>(num_list)", "change the password successfully\"}) else:\"\"\" c=0 if(user_data1): subject = 'QRcode", "BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k) mail.send() msg=\"your qrcode is", "i in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax", "document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax print(ax) t.save() qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6)", "'POST' and request.FILES['myfile']: myfile = request.FILES['myfile'] fs = FileSystemStorage() filename", "sent to your email\" elif(request.method=='POST'and 
Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('<PASSWORD>')).count()==0): msg=\"your email or password", "0), 3) cv2.putText(frame, str(obj.data), (50, 50), font, 2, (255, 0,", "password is changed succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"): m=\"your email or", "import FPDF pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request, 'simple_upload.html', { 'uploaded_file_url':", "recipient_list ) mail.send() if(user_data>0): msg=\"your password is changed succesfully and", "return render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",) def user(request): return render(request,\"user.html\",) def forget_pass(request):", "message = password1 email_from = settings.EMAIL_HOST_USER recipient_list = [email, ]", "= [request.POST.get(\"email\"), ] mail=EmailMessage( subject, message, email_from, recipient_list ) mail.send()", "msg=\"your password is changed succesfully and mail sent\" elif(user_data==0): msg=\"your", "[email, ] mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR =", "(50, 50), font, 2, (255, 0, 0), 3) id =obj.data.decode(\"utf-8\")", "found\" return render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request): msg=\"\" if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1'))", "for license' message = \"resend\" email_from = settings.EMAIL_HOST_USER recipient_list =", "base64 import cv2 import numpy as np import pyzbar.pyzbar as", "= pyzbar.decode(frame) for obj in decodedObjects: points = obj.polygon (x,y,w,h)", "2, (255, 0, 0), 3) id 
=obj.data.decode(\"utf-8\") cv2.imshow(\"QR Reader\", frame)", "message, email_from, recipient_list ) mail.send() if(user_data>0): msg=\"your password is changed", "] mail=EmailMessage( subject, message, email_from, recipient_list ) mail.send() c=1 m=\"your", "filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject = 'QRcode", "subject = 'QRcode scanner for license' message = password1 email_from", "email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1] document_name=document_last[0] print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i", "request.method == 'POST' and request.FILES['myfile']: myfile = request.FILES['myfile'] fs =", "else: form = DocumentForm() return render(request, 'model_form_upload.html', {'form': form,'msg':msg}) def", "from django.conf import settings from django.core.files.storage import FileSystemStorage,default_storage from django.core.mail", "documents }) \"\"\"def simple_upload(request): if request.method == 'POST' and request.FILES['myfile']:", "= cv2.waitKey(10) & 0xFF if decodedObjects!=[] : cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id})", "return redirect('model_form_upload') else: form = DocumentForm() return render(request, 'model_form_upload.html', {'form':", "subject, message, email_from, recipient_list ) mail.send() if(user_data>0): msg=\"your password is", ") BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your successfully uploaded\" return", 
"Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/') print(p) t=p[1] print(t) subject = 'QRcode", "}) \"\"\"def simple_upload(request): if request.method == 'POST' and request.FILES['myfile']: myfile", "= FileSystemStorage() filename = fs.save(myfile.name, myfile) uploaded_file_url = fs.url(filename) BASE_DIR", "(0, 255, 0), 3) cv2.putText(frame, str(obj.data), (50, 50), font, 2,", "as np import pyzbar.pyzbar as pyzbar def home(request): documents= Document.objects.all()", "obj in decodedObjects: points = obj.polygon (x,y,w,h) = obj.rect pts", "password1 email_from = settings.EMAIL_HOST_USER recipient_list = [email, ] mail=EmailMessage( subject,", "simple_upload(request): if request.method == 'POST' and request.FILES['myfile']: myfile = request.FILES['myfile']", "\"\"\"def simple_upload(request): if request.method == 'POST' and request.FILES['myfile']: myfile =", "message, email_from, recipient_list ) mail.send() c=1 m=\"your password is changed", "succesfully changed\"+\" \"+<PASSWORD> email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ]", "request.POST and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",) def user(request):", "def user(request): return render(request,\"user.html\",) def forget_pass(request): msg=\"\" if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\"", "password1=\"\" for i in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii') 
document_encode=str(base64.b64encode(document_name1))", "email_from, recipient_list ) mail.send() if(user_data>0): msg=\"your password is changed succesfully", "pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return render(request, 'simple_upload.html', { 'uploaded_file_url': uploaded_file_url })", "def user_req(request): if(\"scanner\" in request.POST and request.method==\"POST\"): cap = cv2.VideoCapture(0+cv2.CAP_DSHOW)", "subject, message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t) print(k)", "user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you have change the password", "userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you have change the password successfully\"}) else:\"\"\" c=0", "def home(request): documents= Document.objects.all() return render(request, 'home.html', { 'documents': documents", "= 'QRcode scanner for license' message = \"Password has succesfully", "ax=document_encode[2:-1] t.file_url=ax print(ax) t.save() qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\") filename1=filename[0].split(\"/\") filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email])", "django.contrib import messages import os import pyqrcode import png import", "django.shortcuts import render, redirect from django.conf import settings from django.core.files.storage", "BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your 
successfully uploaded\" return redirect('model_form_upload')", "decodedObjects==[]: _, frame = cap.read() decodedObjects = pyzbar.decode(frame) for obj", "= [request.POST.get('email'),] mail=EmailMessage( subject, message, email_from, recipient_list ) BASE_DIR =", "m=\"your email or password is incorrect\" else: m=\"\" print(m) return", "succesfully changed\"+\" \"+request.POST.get(\"pswd\") email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get(\"email\"), ]", "mail.attach_file(os.path.join(BASE_DIR,filename2)) mail.send() msg=\"your successfully uploaded\" return redirect('model_form_upload') else: form =", "m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return render(request,'mypass.html',{u:\"you have change", "or password is incorrect\" else: m=\"\" print(m) return render(request,'mypass.html',{\"m\":m}) def", "is changed succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"): m=\"your email or password", "form = DocumentForm(request.POST, request.FILES,request.POST) if form.is_valid(): form.save() email=form.cleaned_data['Email'] document_count=Document.objects.values_list('document').count() document_last=Document.objects.values_list('document')[document_count-1]", "render(request,\"user_req.html\",{\"id\":id}) if('proceed' in request.POST and request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return render(request,\"user_req.html\",{\"userdata\":userdata}) return", "uploaded_file_url = fs.url(filename) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path = os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name)", "p=m.split('/') print(p) t=p[1] print(t) subject = 'QRcode scanner for license'", "print(email) t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD>", "model_form_upload(request): id=\"\" msg=\"\" if request.method == 'POST': form = DocumentForm(request.POST,", "render(request,\"forget_pass.html\",{\"msg\":msg}) def qrcode_miss(request): msg=\"\" if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/')", "from django.shortcuts import render, redirect from django.conf import settings from", "import os import pyqrcode import png import random import base64", "if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/') print(p) t=p[1] print(t) subject", "'home.html', { 'documents': documents }) \"\"\"def simple_upload(request): if request.method ==", "return render(request,\"user_req.html\",) def user(request): return render(request,\"user.html\",) def forget_pass(request): msg=\"\" if(request.method==\"POST\"):", "= \"resend\" email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get('email'),] mail=EmailMessage( subject,", "request.method==\"POST\"): userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return 
render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",) def user(request): return render(request,\"user.html\",)", "from core.forms import DocumentForm from django.contrib import messages import os", "render(request, 'simple_upload.html', { 'uploaded_file_url': uploaded_file_url }) return render(request, 'simple_upload.html')\"\"\" def", "import send_mail, EmailMessage from core.models import Document from core.forms import", "= pts.reshape((-1, 1, 2)) cv2.polylines(frame, [pts], True, (0, 255, 0),", "[request.POST.get(\"email\"), ] mail=EmailMessage( subject, message, email_from, recipient_list ) mail.send() if(user_data>0):", "= password1 email_from = settings.EMAIL_HOST_USER recipient_list = [email, ] mail=EmailMessage(", "pyqrcode import png import random import base64 import cv2 import", "pts.reshape((-1, 1, 2)) cv2.polylines(frame, [pts], True, (0, 255, 0), 3)", "filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6) '''from fpdf import FPDF pdf=FPDF() pdf.add_page() pdf.image(filename1,x=50,y=None,w=60,h=60,type=\"\",link=uploaded_file_url)''' return", "t=Document.objects.last() num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name))", "for license Forget password' message = \"Password has succesfully changed\"+\"", "= fs.url(filename) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) media_path = os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url)", "if request.method == 'POST' and request.FILES['myfile']: myfile = request.FILES['myfile'] fs", "def qrcode_miss(request): msg=\"\" if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): 
user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/') print(p)", "import FileSystemStorage,default_storage from django.core.mail import send_mail, EmailMessage from core.models import", "form,'msg':msg}) def mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\") return", "\"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject = 'QRcode scanner for license' message", ") mail.send() c=1 m=\"your password is changed succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and", "{'form': form,'msg':msg}) def mypass(request): m=\"\" if(request.POST.get(\"pswd\")==request.POST.get(\"pswd3\")): user_data=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")).update(password=request.POST.get(\"pswd\")) user_data1=Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"pswd\")) \"\"\"if(len_user_data==1): userdata.password=request.POST.get(\"pswd\")", "is incorrect\" else: m=\"\" print(m) return render(request,'mypass.html',{\"m\":m}) def user_req(request): if(\"scanner\"", "(x,y,w,h) = obj.rect pts = np.array(points, np.int32) pts = pts.reshape((-1,", "request.FILES['myfile']: myfile = request.FILES['myfile'] fs = FileSystemStorage() filename = fs.save(myfile.name,", "recipient_list ) mail.send() c=1 m=\"your password is changed succesfully\" 
elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0", "in range(0,8): password1=<PASSWORD>+random.<PASSWORD>(num_list) t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax print(ax)", "\"resend\" email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get('email'),] mail=EmailMessage( subject, message,", "cv2.VideoCapture(0+cv2.CAP_DSHOW) font = cv2.FONT_HERSHEY_PLAIN decodedObjects=[] while decodedObjects==[]: _, frame =", "elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"): m=\"your email or password is incorrect\" else:", "cv2.waitKey(10) & 0xFF if decodedObjects!=[] : cv2.destroyAllWindows() return render(request,\"user_req.html\",{\"id\":id}) if('proceed'", "email_from = settings.EMAIL_HOST_USER recipient_list = [request.POST.get('email'),] mail=EmailMessage( subject, message, email_from,", "import png import random import base64 import cv2 import numpy", "Reader\", frame) key = cv2.waitKey(10) & 0xFF if decodedObjects!=[] :", "= cap.read() decodedObjects = pyzbar.decode(frame) for obj in decodedObjects: points", "decodedObjects = pyzbar.decode(frame) for obj in decodedObjects: points = obj.polygon", "filename2=filename1[1]+\".png\" qr.png(filename2,scale=6) \"\"\"mail=EmailMessage('QR',password1,'<EMAIL>',[email]) #mail.attach(filename2,filename2.content_type) mail.send()\"\"\" subject = 'QRcode scanner for", "points = obj.polygon (x,y,w,h) = obj.rect pts = np.array(points, np.int32)", "os.path.join(BASE_DIR,'media') full_path=os.path.join(media_path,myfile.name) qr=pyqrcode.create(uploaded_file_url) filename_before=filename.rsplit(\".\") filename1=filename_before[0]+\".png\" s=qr.png(filename1,scale=6) '''from fpdf import FPDF", "render(request, 'home.html', { 
'documents': documents }) \"\"\"def simple_upload(request): if request.method", "= settings.EMAIL_HOST_USER recipient_list = [request.POST.get('email'),] mail=EmailMessage( subject, message, email_from, recipient_list", "k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k) mail.send() msg=\"your qrcode is sent to your", "t.password=<PASSWORD> print(type(document_name)) document_name1=document_name.encode('ascii') document_encode=str(base64.b64encode(document_name1)) ax=document_encode[2:-1] t.file_url=ax print(ax) t.save() qr=pyqrcode.create(ax) filename=document_name.rsplit(\".\")", "msg=\"your successfully uploaded\" return redirect('model_form_upload') else: form = DocumentForm() return", "m=user_data[0][0] p=m.split('/') print(p) t=p[1] print(t) subject = 'QRcode scanner for", "userdata=Document.objects.filter(file_url=request.POST.get(\"id1\")).filter(password=request.POST.get(\"password1\")) return render(request,\"user_req.html\",{\"userdata\":userdata}) return render(request,\"user_req.html\",) def user(request): return render(request,\"user.html\",) def", "render(request,'mypass.html',{\"m\":m}) def user_req(request): if(\"scanner\" in request.POST and request.method==\"POST\"): cap =", "[pts], True, (0, 255, 0), 3) cv2.putText(frame, str(obj.data), (50, 50),", "return render(request,'mypass.html',{\"m\":m}) def user_req(request): if(\"scanner\" in request.POST and request.method==\"POST\"): cap", "Forget password' message = \"Password has succesfully changed\"+\" \"+<PASSWORD> email_from", "if(request.method==\"POST\"): num_list=['0','1','2','3','4','5','6','7','8','9'] password1=\"\" for i in range(0,8): password1=<PASSWORD>(num_list) user_data=Document.objects.filter(Email=request.POST.get(\"email\")).update(password=<PASSWORD>) subject", "changed succesfully\" elif(len(Document.objects.filter(Email=request.POST.get(\"email\"),password=request.POST.get(\"old_pswd\")))==0 and request.method==\"POST\"): m=\"your email or password is", "}) return 
render(request, 'simple_upload.html')\"\"\" def model_form_upload(request): id=\"\" msg=\"\" if request.method", "message, email_from, recipient_list ) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) k=os.path.join(BASE_DIR,t) print(k) mail.attach_file(k)", "msg=\"\" if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))): user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')) m=user_data[0][0] p=m.split('/') print(p) t=p[1] print(t)" ]
[]
[ "query = input(\">> \") if 'exit' in query: exit() else:", "exit by type exit\\n') while True: query = input(\">> \")", "from chatterbot import ChatBot from chatterbot.trainers import ChatterBotCorpusTrainer form termcolor", "ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english') cprint(\"#\" * 50, \"magenta\") cprint((f\"A Chatot \").center(50), \"yellow\")", "- 18 october 2020 ''' from chatterbot import ChatBot from", "\"yellow\") cprint(\"#\" * 50, \"magenta\") print('You can exit by type", "termcolor import cprint import time chatbot = ChatBot('Bot') trainer =", "= ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english') cprint(\"#\" * 50, \"magenta\") cprint((f\"A Chatot \").center(50),", "ChatBot from chatterbot.trainers import ChatterBotCorpusTrainer form termcolor import cprint import", "cprint(\"#\" * 50, \"magenta\") cprint((f\"A Chatot \").center(50), \"yellow\") cprint(\"#\" *", "exit\\n') while True: query = input(\">> \") if 'exit' in", "Python mini project Date - 18 october 2020 ''' from", "from chatterbot.trainers import ChatterBotCorpusTrainer form termcolor import cprint import time", "cprint import time chatbot = ChatBot('Bot') trainer = ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english')", "Date - 18 october 2020 ''' from chatterbot import ChatBot", "2020 ''' from chatterbot import ChatBot from chatterbot.trainers import ChatterBotCorpusTrainer", "50, \"magenta\") cprint((f\"A Chatot \").center(50), \"yellow\") cprint(\"#\" * 50, \"magenta\")", "ChatterBotCorpusTrainer form termcolor import cprint import time chatbot = ChatBot('Bot')", "by - <NAME> Purpose - Python mini project Date -", "type exit\\n') while True: query = input(\">> \") if 'exit'", "print('You can exit by type exit\\n') while True: query =", "chatterbot.trainers import ChatterBotCorpusTrainer form termcolor import cprint import time chatbot", "ChatBot('Bot') trainer = ChatterBotCorpusTrainer(chatbot) 
trainer.train('chatterbot.corpus.english') cprint(\"#\" * 50, \"magenta\") cprint((f\"A", "<NAME> Purpose - Python mini project Date - 18 october", "chatterbot import ChatBot from chatterbot.trainers import ChatterBotCorpusTrainer form termcolor import", "* 50, \"magenta\") cprint((f\"A Chatot \").center(50), \"yellow\") cprint(\"#\" * 50,", "50, \"magenta\") print('You can exit by type exit\\n') while True:", "can exit by type exit\\n') while True: query = input(\">>", "''' from chatterbot import ChatBot from chatterbot.trainers import ChatterBotCorpusTrainer form", "october 2020 ''' from chatterbot import ChatBot from chatterbot.trainers import", "form termcolor import cprint import time chatbot = ChatBot('Bot') trainer", "trainer.train('chatterbot.corpus.english') cprint(\"#\" * 50, \"magenta\") cprint((f\"A Chatot \").center(50), \"yellow\") cprint(\"#\"", "\"magenta\") cprint((f\"A Chatot \").center(50), \"yellow\") cprint(\"#\" * 50, \"magenta\") print('You", "import ChatBot from chatterbot.trainers import ChatterBotCorpusTrainer form termcolor import cprint", "cprint((f\"A Chatot \").center(50), \"yellow\") cprint(\"#\" * 50, \"magenta\") print('You can", "\"magenta\") print('You can exit by type exit\\n') while True: query", "mini project Date - 18 october 2020 ''' from chatterbot", "cprint(\"#\" * 50, \"magenta\") print('You can exit by type exit\\n')", "trainer = ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english') cprint(\"#\" * 50, \"magenta\") cprint((f\"A Chatot", "while True: query = input(\">> \") if 'exit' in query:", "''' Made by - <NAME> Purpose - Python mini project", "= ChatBot('Bot') trainer = ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english') cprint(\"#\" * 50, \"magenta\")", "Made by - <NAME> Purpose - Python mini project Date", "Chatot \").center(50), \"yellow\") cprint(\"#\" * 50, \"magenta\") print('You can exit", "import cprint import time chatbot = ChatBot('Bot') trainer = 
ChatterBotCorpusTrainer(chatbot)", "Purpose - Python mini project Date - 18 october 2020", "True: query = input(\">> \") if 'exit' in query: exit()", "import time chatbot = ChatBot('Bot') trainer = ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english') cprint(\"#\"", "18 october 2020 ''' from chatterbot import ChatBot from chatterbot.trainers", "- <NAME> Purpose - Python mini project Date - 18", "= input(\">> \") if 'exit' in query: exit() else: print(chatbot.get_response(query))", "time chatbot = ChatBot('Bot') trainer = ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english') cprint(\"#\" *", "import ChatterBotCorpusTrainer form termcolor import cprint import time chatbot =", "- Python mini project Date - 18 october 2020 '''", "chatbot = ChatBot('Bot') trainer = ChatterBotCorpusTrainer(chatbot) trainer.train('chatterbot.corpus.english') cprint(\"#\" * 50,", "\").center(50), \"yellow\") cprint(\"#\" * 50, \"magenta\") print('You can exit by", "project Date - 18 october 2020 ''' from chatterbot import", "by type exit\\n') while True: query = input(\">> \") if", "* 50, \"magenta\") print('You can exit by type exit\\n') while" ]
[ "mask = c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue = decimalToBinary(int(c[1]))", "the bit in the value unchanged. For example, consider the", "runs this port isn't compatible with the docking program on", "\"r\") def processData(file): data = [] for x in f:", "overwrites the corresponding bit in the value, while an X", "inspection, you discover that the sea port's computer system uses", "still need the sum of all values left in memory", "val in memory.values(): sum += val # print(memory) return sum", "value, while an X leaves the bit in the value", "range(len(bin_address)): # find each X and add its idx to", "current bitmask is applied to values immediately before they are", "to write 0 to address 8: value: 000000000000000000000000000000000000 (decimal 0)", "all values left in memory after it completes? \"\"\" def", "overwritten, three of which are different, and two of which", "this example, the sum is 208. Execute the initialization program", "8. The bitmask is always given as a string of", "0s & 1s # w/ leading 0s possible = decimalToBinary(i)", "1 else: newAddress += bin_address[i] addresses.append(newAddress) return addresses def initialize_v2(commands):", "generate all possible combos of 0s & 1s # w/", "== \"mask\": mask = c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue", "different, and two of which are floating. Floating bits take", "(decimal 19) 000000000000000000000000000000011000 (decimal 24) 000000000000000000000000000000011001 (decimal 25) 000000000000000000000000000000011010 (decimal", "If the bitmask bit is 0, the corresponding memory address", "potentially causing many memory addresses to be written all at", "producing a sum of 165. Execute the initialization program. 
What", "i == xPositions[currPos]: newAddress += c[currPos] currPos += 1 else:", "36 bits, written with the most significant bit (representing 2^35)", "initialize_v2(data) print(\"Part 2:\", sumAllValuesV2) # binary = decimalToBinary(33323) # binary", "in range(len(bin_address)): # find each X and add its idx", "0, and the 64s bit is overwritten with 1. The", "1. If the bitmask bit is X, the corresponding memory", "left in memory after it completes? \"\"\" def calculateCombinations(bin_address): combinations", "decimalToBinary(33323) # binary = leadingZeros(36, binary) # print(binary) # combos", "all values left in memory after the initialization program completes.", "program completes. (The entire 36-bit address space begins initialized to", "three of which are different, and two of which are", "combination of values; with two floating bits, four actual memory", "and instead fluctuates unpredictably. In practice, this means the floating", "newAddress = \"\" currPos = 0 for i in range(len(bin_address)):", "specifying a bitmask (mask = ....). The mask it specifies", "it overwrote were already the values the mask tried to", "each X and add its idx to a list if", "input) can either update the bitmask or write a value", "communicate with your ferry's docking program. It must be using", "the left and the least significant bit (2^0, that is,", "with 1. The program then attempts to write the value", "it completes? 
\"\"\" f = open(\"challenges\\data\\day14data.txt\", \"r\") def processData(file): data", "newVal = \"\" for i in range(len(mask)): if mask[i] !=", "address 26 with a different bitmask: address: 000000000000000000000000000000011010 (decimal 26)", "bit in the value, while an X leaves the bit", "result: 000000000000000000000000000001000000 (decimal 64) 64 is written to address 8", "list if bin_address[i] == \"X\": xPositions.append(i) # xCount += 1", "mask[i] else: newVal += binary36[i] addresses = calculateCombinations(newVal) for a", "bitmask: address: 000000000000000000000000000000011010 (decimal 26) mask: 00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX This", "== \"X\": xPositions.append(i) # xCount += 1 if len(xPositions) >", "(mask = ....). The mask it specifies will overwrite two", "c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress = decimalToBinary(int(address)) binary36 =", "effect, as the bits it overwrote were already the values", "completes? \"\"\" f = open(\"challenges\\data\\day14data.txt\", \"r\") def processData(file): data =", "val # print(memory) return sum data = processData(f) # [print(d)", "# [print(d) for d in data] sumAllValues = initialize(data) print(\"Part", "to Binary number def decimalToBinary(n): return bin(n).replace(\"0b\", \"\") def leadingZeros(length,", "binary36 = leadingZeros(36, binaryAddress) newVal = \"\" for i in", "\"0\"*leadingZeros + bin_num def initialize(commands): memory = {} mask =", "ferry approaches the sea port, the captain asks for your", "101) This time, the mask has no effect, as the", "mask[i] == \"X\": memory[address] += binary36[i] else: memory[address] += mask[i]", "(decimal 26) 000000000000000000000000000000011011 (decimal 27) The entire 36-bit address space", "to memory. 
Values and memory addresses are both 36-bit unsigned", "bits in every written value: the 2s bit is overwritten", "time, the mask has no effect, as the bits it", "c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue = decimalToBinary(int(c[1])) binary36 = leadingZeros(36, binaryValue) memory[address] =", "are different, and two of which are floating. Floating bits", "computer system still can't communicate with your ferry's docking program.", "initialize(commands): memory = {} mask = \"X\"*36 for c in", "write the value 11 to memory address 8. The bitmask", "only two values in memory are not zero - 101", "overwritten with 0, and the 64s bit is overwritten with", "# print(memory) return sum data = processData(f) # [print(d) for", "in the value unchanged. For example, consider the following program:", "the mask has no effect, as the bits it overwrote", "The initialization program (your puzzle input) can either update the", "example, only two values in memory are not zero -", "following way: If the bitmask bit is 0, the corresponding", "you still need the sum of all values left in", "following program: mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7] =", "causing writes to eight memory addresses: 000000000000000000000000000000010000 (decimal 16) 000000000000000000000000000000010001", "program tries to write 0 to address 8: value: 000000000000000000000000000000000000", "address bit is unchanged. If the bitmask bit is 1,", "connected to anything and instead fluctuates unpredictably. 
In practice, this", "value: the 2s bit is overwritten with 0, and the", "# current combo associated idx is in xPositions[i] newAddress =", "(decimal 101) This time, the mask has no effect, as", "decimalToBinary(int(c[1])) binary36 = leadingZeros(36, binaryValue) memory[address] = \"\" for i", "24) 000000000000000000000000000000011001 (decimal 25) 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27)", "and i == xPositions[currPos]: newAddress += c[currPos] currPos += 1", "of the mask, the value 73 is written to memory", "if mask[i] == \"X\": memory[address] += binary36[i] else: memory[address] +=", "with two floating bits, four actual memory addresses are written:", "memory.values(): sum += val # print(memory) return sum data =", "possible values, potentially causing many memory addresses to be written", "The computer system that runs this port isn't compatible with", "26) mask: 00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX This results in an address", "= processData(f) # [print(d) for d in data] sumAllValues =", "= initialize(data) print(\"Part 1:\", sumAllValues) sumAllValuesV2 = initialize_v2(data) print(\"Part 2:\",", "the value 11 to memory address 8. By expanding everything", "the corresponding bit in the value, while an X leaves", "written: 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) 000000000000000000000000000000111010 (decimal 58)", "decimalToBinary(n): return bin(n).replace(\"0b\", \"\") def leadingZeros(length, bin_num): leadingZeros = length", "bitmask system in its initialization program. 
Although you don't have", "value: 000000000000000000000000000000000000 (decimal 0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000 (decimal 64)", "(decimal 27) 000000000000000000000000000000111010 (decimal 58) 000000000000000000000000000000111011 (decimal 59) Next, the", "in an address with three floating bits, causing writes to", "# binary = decimalToBinary(33323) # binary = leadingZeros(36, binary) #", "w/ leading 0s possible = decimalToBinary(i) while len(possible) < len(xPositions):", "in addresses: memory[a] = int(c[1]) sum = 0 for val", "the docking program on the ferry, so the docking parameters", "significant bit (representing 2^35) on the left and the least", "for val in memory.values(): sum += int(\"\".join(val), 2) return sum", "the initialization program using an emulator for a version 2", "= [] for c in combinations: # need to insert", "Decimal number # to Binary number def decimalToBinary(n): return bin(n).replace(\"0b\",", "xPositions[currPos]: newAddress += c[currPos] currPos += 1 else: newAddress +=", "= int(c[1]) sum = 0 for val in memory.values(): sum", "of which are different, and two of which are floating.", "to anything and instead fluctuates unpredictably. In practice, this means", "the corresponding memory address bit is unchanged. If the bitmask", "values, potentially causing many memory addresses to be written all", "= initialize_v2(data) print(\"Part 2:\", sumAllValuesV2) # binary = decimalToBinary(33323) #", "possible combos of 0s & 1s # w/ leading 0s", "initialization program. What is the sum of all values left", "addresses to be written all at once! For example, consider", "memory[address] += mask[i] sum = 0 for val in memory.values():", "software! 
The initialization program (your puzzle input) can either update", "else: newAddress += bin_address[i] addresses.append(newAddress) return addresses def initialize_v2(commands): memory", "leadingZeros(36, binaryValue) memory[address] = \"\" for i in range(len(mask)): if", "and memory addresses are both 36-bit unsigned integers. For example,", "101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101 (decimal 101) This time, the", "initialize your ferry's docking program, you need the sum of", "ferry's docking program, you need the sum of all values", "convert Decimal number # to Binary number def decimalToBinary(n): return", "acts as a memory address decoder. Immediately before a value", "need to generate all possible combos of 0s & 1s", "your help again. The computer system that runs this port", "the sum is 208. Execute the initialization program using an", "[] for i in range(len(bin_address)): # find each X and", "(decimal 101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101 (decimal 101) This time,", "address decoder. Immediately before a value is written to memory,", "len(xPositions) > 0: for i in range(2**(len(xPositions))): # need to", "the corresponding memory address bit is overwritten with 1. If", "address 8: value: 000000000000000000000000000000000000 (decimal 0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000", "tried to set. Finally, the program tries to write 0", "0, the corresponding memory address bit is unchanged. If the", "doesn't modify the values being written at all. 
Instead, it", "\"0\": newVal += mask[i] else: newVal += binary36[i] addresses =", "000000000000000000000000000000010001 (decimal 17) 000000000000000000000000000000010010 (decimal 18) 000000000000000000000000000000010011 (decimal 19) 000000000000000000000000000000011000", "eight memory addresses: 000000000000000000000000000000010000 (decimal 16) 000000000000000000000000000000010001 (decimal 17) 000000000000000000000000000000010010", "corresponding memory address bit is floating. A floating bit is", "like mem[8] = 11 would write the value 11 to", "currPos < len(xPositions) and i == xPositions[currPos]: newAddress += c[currPos]", "overwrite two bits in every written value: the 2s bit", "in memory.values(): sum += int(\"\".join(val), 2) return sum \"\"\" ---", "= \") data.append((x[0], x[1])) return data # Function to convert", "== \"X\": memory[address] += binary36[i] else: memory[address] += mask[i] sum", "bit is X, the corresponding memory address bit is floating.", "initialization program (your puzzle input) can either update the bitmask", "a memory address decoder. Immediately before a value is written", "= [] # xCount = 0 xPositions = [] for", "bitmask bit is X, the corresponding memory address bit is", "for d in data] sumAllValues = initialize(data) print(\"Part 1:\", sumAllValues)", "a moment, a line like mem[8] = 11 would write", "program is about to write to memory address 26 with", "memory addresses: 000000000000000000000000000000010000 (decimal 16) 000000000000000000000000000000010001 (decimal 17) 000000000000000000000000000000010010 (decimal", "initialization program. Although you don't have the correct decoder chip", "instead. 
Then, the program tries to write 101 to address", "'').split(\" = \") data.append((x[0], x[1])) return data # Function to", "have the correct decoder chip handy, you can emulate it", "36-bit address space still begins initialized to the value 0", "data = [] for x in f: x=x.strip().replace('\\n', '').split(\" =", "59) Next, the program is about to write to memory", "18) 000000000000000000000000000000010011 (decimal 19) 000000000000000000000000000000011000 (decimal 24) 000000000000000000000000000000011001 (decimal 25)", "initialize(data) print(\"Part 1:\", sumAllValues) sumAllValuesV2 = initialize_v2(data) print(\"Part 2:\", sumAllValuesV2)", "the value 11 to memory address 8. The bitmask is", "has no effect, as the bits it overwrote were already", "correctly initialized in the docking program's memory. After a brief", "is, the 1s bit) on the right. The current bitmask", "at once! For example, consider the following program: mask =", "In practice, this means the floating bits will take on", "fluctuates unpredictably. In practice, this means the floating bits will", "program on the ferry, so the docking parameters aren't being", "bitmask is applied to values immediately before they are written", "strange bitmask system in its initialization program. Although you don't", "memory: a 0 or 1 overwrites the corresponding bit in", "Instead, it acts as a memory address decoder. Immediately before", "corresponding memory address bit is unchanged. If the bitmask bit", "not connected to anything and instead fluctuates unpredictably. In practice,", "--- Day 14: Docking Data --- As your ferry approaches", "to memory address 8 instead. 
Then, the program tries to", "11 would write the value 11 to memory address 8.", "\"\"\" f = open(\"challenges\\data\\day14data.txt\", \"r\") def processData(file): data = []", "i in range(len(bin_address)): # find each X and add its", "= leadingZeros(36, binary) # print(binary) # combos = initialize_v2([(\"mask\", \"100X100X101011111X100000100X11010011\"),", "000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X After applying the mask, four bits are", "write 0 to address 8: value: 000000000000000000000000000000000000 (decimal 0) mask:", "address 7: value: 000000000000000000000000000001100101 (decimal 101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101", "decoder. Immediately before a value is written to memory, each", "before they are written to memory: a 0 or 1", "mask is applied as follows: value: 000000000000000000000000000000001011 (decimal 11) mask:", "be written all at once! For example, consider the following", "everything out to individual bits, the mask is applied as", "\"\" for i in range(len(mask)): if mask[i] == \"X\": memory[address]", "address: 000000000000000000000000000000011010 (decimal 26) mask: 00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX This results", "range(2**(len(xPositions))): # need to generate all possible combos of 0s", "possible = decimalToBinary(i) while len(possible) < len(xPositions): possible = \"0\"+possible", "system in its initialization program. Although you don't have the", "i in range(len(mask)): if mask[i] == \"X\": memory[address] += binary36[i]", "are written to memory: a 0 or 1 overwrites the", "mask = c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress = decimalToBinary(int(address))", "program: mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7] = 101", "destination memory address in the following way: If the bitmask", "values being written at all. 
Instead, it acts as a", "that was there previously. To initialize your ferry's docking program,", "of 165. Execute the initialization program. What is the sum", "version 2 decoder chip. What is the sum of all", "that is, the 1s bit) on the right. The current", "= decimalToBinary(i) while len(possible) < len(xPositions): possible = \"0\"+possible combinations.append(possible)", "[print(d) for d in data] sumAllValues = initialize(data) print(\"Part 1:\",", "set. Finally, the program tries to write 0 to address", "and two of which are floating. Floating bits take on", "find each X and add its idx to a list", "practice, this means the floating bits will take on all", "unsigned integers. For example, ignoring bitmasks for a moment, a", "Function to convert Decimal number # to Binary number def", "on the ferry, so the docking parameters aren't being correctly", "combinations = [] # xCount = 0 xPositions = []", "written to memory address 8 instead. Then, the program tries", "= 0 for i in range(len(bin_address)): if currPos < len(xPositions)", "the bitmask bit is X, the corresponding memory address bit", "if len(xPositions) > 0: for i in range(2**(len(xPositions))): # need", "# w/ leading 0s possible = decimalToBinary(i) while len(possible) <", "expanding everything out to individual bits, the mask is applied", "return addresses def initialize_v2(commands): memory = {} mask = \"X\"*36", "= c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress = decimalToBinary(int(address)) binary36", "so the docking parameters aren't being correctly initialized in the", "goes to write to memory address 42, it first applies", "leaves the bit in the value unchanged. 
For example, consider", "101 mem[8] = 0 This program starts by specifying a", "the value, while an X leaves the bit in the", "f = open(\"challenges\\data\\day14data.txt\", \"r\") def processData(file): data = [] for", "if mask[i] != \"0\": newVal += mask[i] else: newVal +=", "every possible combination of values; with two floating bits, four", "= 0 xPositions = [] for i in range(len(bin_address)): #", "left in memory after the initialization program completes. (The entire", "entire 36-bit address space begins initialized to the value 0", "to convert Decimal number # to Binary number def decimalToBinary(n):", "000000000000000000000000000000011011 (decimal 27) The entire 36-bit address space still begins", "memory after it completes? \"\"\" f = open(\"challenges\\data\\day14data.txt\", \"r\") def", "with a different bitmask: address: 000000000000000000000000000000011010 (decimal 26) mask: 00000000000000000000000000000000X0XX", "first applies the bitmask: address: 000000000000000000000000000000101010 (decimal 42) mask: 000000000000000000000000000000X1001X", "initialized to the value 0 at every address.) In the", "brief inspection, you discover that the sea port's computer system", "as a string of 36 bits, written with the most", "(2^0, that is, the 1s bit) on the right. The", "+= 1 else: newAddress += bin_address[i] addresses.append(newAddress) return addresses def", "take on all possible values, potentially causing many memory addresses", "values left in memory after it completes? \"\"\" f =", "mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001 (decimal 73) So, because of the", "c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress = decimalToBinary(int(address)) binary36 = leadingZeros(36, binaryAddress) newVal =", "+= binary36[i] addresses = calculateCombinations(newVal) for a in addresses: memory[a]", "in software! 
The initialization program (your puzzle input) can either", "17) 000000000000000000000000000000010010 (decimal 18) 000000000000000000000000000000010011 (decimal 19) 000000000000000000000000000000011000 (decimal 24)", "bitmasks for a moment, a line like mem[8] = 11", "memory address 8 instead. Then, the program tries to write", "return bin(n).replace(\"0b\", \"\") def leadingZeros(length, bin_num): leadingZeros = length -", "else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue = decimalToBinary(int(c[1])) binary36 = leadingZeros(36,", "= 11 mem[7] = 101 mem[8] = 0 This program", "= 1 When this program goes to write to memory", "Finally, the program tries to write 0 to address 8:", "the bitmask modifies the corresponding bit of the destination memory", "the bitmask: address: 000000000000000000000000000000101010 (decimal 42) mask: 000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X", "27) The entire 36-bit address space still begins initialized to", "is in xPositions[i] newAddress = \"\" currPos = 0 for", "addresses are both 36-bit unsigned integers. For example, ignoring bitmasks", "corresponding bit in the value, while an X leaves the", "written value: the 2s bit is overwritten with 0, and", "the mask is applied as follows: value: 000000000000000000000000000000001011 (decimal 11)", "in memory after it completes? \"\"\" f = open(\"challenges\\data\\day14data.txt\", \"r\")", "using an emulator for a version 2 decoder chip. What", "of the program. 
In this example, the sum is 208.", "currPos = 0 for i in range(len(bin_address)): if currPos <", "# binary = leadingZeros(36, binary) # print(binary) # combos =", "00000000000000000000000000000000X0XX mem[26] = 1 When this program goes to write", "the following program: mask = 000000000000000000000000000000X1001X mem[42] = 100 mask", "sumAllValues = initialize(data) print(\"Part 1:\", sumAllValues) sumAllValuesV2 = initialize_v2(data) print(\"Part", "mask, the value 73 is written to memory address 8", "entire 36-bit address space still begins initialized to the value", "after it completes? \"\"\" f = open(\"challenges\\data\\day14data.txt\", \"r\") def processData(file):", "three floating bits, causing writes to eight memory addresses: 000000000000000000000000000000010000", "not zero - 101 (at address 7) and 64 (at", "version 2 of the decoder chip! A version 2 decoder", "example, consider the following program: mask = 000000000000000000000000000000X1001X mem[42] =", "combination[i] into binary number # current combo associated idx is", "This time, the mask has no effect, as the bits", "is 1, the corresponding memory address bit is overwritten with", "{} mask = \"X\"*36 for c in commands: if c[0]", "def leadingZeros(length, bin_num): leadingZeros = length - len(bin_num) return \"0\"*leadingZeros", "anything and instead fluctuates unpredictably. In practice, this means the", "values left in memory after the initialization program completes. 
(The", "0 xPositions = [] for i in range(len(bin_address)): # find", "in data] sumAllValues = initialize(data) print(\"Part 1:\", sumAllValues) sumAllValuesV2 =", "Binary number def decimalToBinary(n): return bin(n).replace(\"0b\", \"\") def leadingZeros(length, bin_num):", "# combos = initialize_v2([(\"mask\", \"100X100X101011111X100000100X11010011\"), # (\"mem[33323]\", \"349380\")]) # print(combos)", "is overwritten with 0, and the 64s bit is overwritten", "length - len(bin_num) return \"0\"*leadingZeros + bin_num def initialize(commands): memory", "000000000000000000000000000001100101 (decimal 101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101 (decimal 101) This", "every address.) In the above example, only two values in", "instead fluctuates unpredictably. In practice, this means the floating bits", "1:\", sumAllValues) sumAllValuesV2 = initialize_v2(data) print(\"Part 2:\", sumAllValuesV2) # binary", "values left in memory after it completes? \"\"\" def calculateCombinations(bin_address):", "7: value: 000000000000000000000000000001100101 (decimal 101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101 (decimal", "an emulator for a version 2 decoder chip. What is", "discover that the sea port's computer system uses a strange", "sum = 0 for val in memory.values(): sum += val", "binary36 = leadingZeros(36, binaryValue) memory[address] = \"\" for i in", "bit (2^0, that is, the 1s bit) on the right.", "take on every possible combination of values; with two floating", "the sea port's computer system still can't communicate with your", "this means the floating bits will take on all possible", "11 to memory address 8. 
By expanding everything out to", "of all values left in memory after the initialization program", "(decimal 26) 000000000000000000000000000000011011 (decimal 27) 000000000000000000000000000000111010 (decimal 58) 000000000000000000000000000000111011 (decimal", "address with three floating bits, causing writes to eight memory", "\"mask\": mask = c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue =", "The entire 36-bit address space still begins initialized to the", "xPositions.append(i) # xCount += 1 if len(xPositions) > 0: for", "is written to memory address 8 instead. Then, the program", "two of which are floating. Floating bits take on every", "A floating bit is not connected to anything and instead", "still begins initialized to the value 0 at every address,", "currPos += 1 else: newAddress += bin_address[i] addresses.append(newAddress) return addresses", "mask: 00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX This results in an address with", "idx to a list if bin_address[i] == \"X\": xPositions.append(i) #", "Data --- As your ferry approaches the sea port, the", "are both 36-bit unsigned integers. For example, ignoring bitmasks for", "....). The mask it specifies will overwrite two bits in", "to the value 0 at every address.) 
In the above", "in memory are not zero - 101 (at address 7)", "bitmask is always given as a string of 36 bits,", "left and the least significant bit (2^0, that is, the", "(The entire 36-bit address space begins initialized to the value", "the mask, four bits are overwritten, three of which are", "binary) # print(binary) # combos = initialize_v2([(\"mask\", \"100X100X101011111X100000100X11010011\"), # (\"mem[33323]\",", "memory are not zero - 101 (at address 7) and", "before a value is written to memory, each bit in", "your ferry approaches the sea port, the captain asks for", "actual memory addresses are written: 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal", "else: newVal += binary36[i] addresses = calculateCombinations(newVal) for a in", "bin(n).replace(\"0b\", \"\") def leadingZeros(length, bin_num): leadingZeros = length - len(bin_num)", "in the docking program's memory. After a brief inspection, you", "(decimal 73) So, because of the mask, the value 73", "in range(len(mask)): if mask[i] != \"0\": newVal += mask[i] else:", "processData(file): data = [] for x in f: x=x.strip().replace('\\n', '').split(\"", "to memory: a 0 or 1 overwrites the corresponding bit", "can emulate it in software! The initialization program (your puzzle", "000000000000000000000000000000011001 (decimal 25) 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) The", "values immediately before they are written to memory: a 0", "As your ferry approaches the sea port, the captain asks", "36-bit unsigned integers. For example, ignoring bitmasks for a moment,", "bitmask bit is 0, the corresponding memory address bit is", "different bitmask: address: 000000000000000000000000000000011010 (decimal 26) mask: 00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX", "with the docking program on the ferry, so the docking", "memory. 
Values and memory addresses are both 36-bit unsigned integers.", "(at address 8) - producing a sum of 165. Execute", "the value 73 is written to memory address 8 instead.", "your ferry's docking program, you need the sum of all", "# xCount += 1 if len(xPositions) > 0: for i", "x=x.strip().replace('\\n', '').split(\" = \") data.append((x[0], x[1])) return data # Function", "return sum data = processData(f) # [print(d) for d in", "If the bitmask bit is 1, the corresponding memory address", "parameters aren't being correctly initialized in the docking program's memory.", "for your help again. The computer system that runs this", "overwritten with 1. The program then attempts to write the", "len(bin_num) return \"0\"*leadingZeros + bin_num def initialize(commands): memory = {}", "mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000 (decimal 64) 64 is written to", "the initialization program completes. (The entire 36-bit address space begins", "address 8) - producing a sum of 165. Execute the", "with 0, and the 64s bit is overwritten with 1.", "address 42, it first applies the bitmask: address: 000000000000000000000000000000101010 (decimal", "result: 00000000000000000000000000000001X0XX This results in an address with three floating", "address space still begins initialized to the value 0 at", "address, and you still need the sum of all values", "def calculateCombinations(bin_address): combinations = [] # xCount = 0 xPositions", "binaryAddress) newVal = \"\" for i in range(len(mask)): if mask[i]", "val in memory.values(): sum += int(\"\".join(val), 2) return sum \"\"\"", "value unchanged. 
For example, consider the following program: mask =", "program: mask = 000000000000000000000000000000X1001X mem[42] = 100 mask = 00000000000000000000000000000000X0XX", "for c in combinations: # need to insert combination[i] into", "address 7) and 64 (at address 8) - producing a", "for x in f: x=x.strip().replace('\\n', '').split(\" = \") data.append((x[0], x[1]))", "both 36-bit unsigned integers. For example, ignoring bitmasks for a", "the bits it overwrote were already the values the mask", "initialization program using an emulator for a version 2 decoder", "program starts by specifying a bitmask (mask = ....). The", "of all values left in memory at the end of", "(decimal 18) 000000000000000000000000000000010011 (decimal 19) 000000000000000000000000000000011000 (decimal 24) 000000000000000000000000000000011001 (decimal", "is overwritten with 1. If the bitmask bit is X,", "Day 14: Docking Data --- As your ferry approaches the", "an X leaves the bit in the value unchanged. For", "(decimal 64) 64 is written to address 8 instead, overwriting", "as follows: value: 000000000000000000000000000000001011 (decimal 11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001", "i in range(len(mask)): if mask[i] != \"0\": newVal += mask[i]", "= decimalToBinary(int(c[1])) binary36 = leadingZeros(36, binaryValue) memory[address] = \"\" for", "By expanding everything out to individual bits, the mask is", "leading 0s possible = decimalToBinary(i) while len(possible) < len(xPositions): possible", "memory. After a brief inspection, you discover that the sea", "program. 
What is the sum of all values left in", "print(memory) return sum data = processData(f) # [print(d) for d", "example, ignoring bitmasks for a moment, a line like mem[8]", "two values in memory are not zero - 101 (at", "sea port's computer system still can't communicate with your ferry's", "100 mask = 00000000000000000000000000000000X0XX mem[26] = 1 When this program", "always given as a string of 36 bits, written with", "decoder chip. What is the sum of all values left", "bit in the bitmask modifies the corresponding bit of the", "bits will take on all possible values, potentially causing many", "port isn't compatible with the docking program on the ferry,", "writes to eight memory addresses: 000000000000000000000000000000010000 (decimal 16) 000000000000000000000000000000010001 (decimal", "are written: 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) 000000000000000000000000000000111010 (decimal", "address 8 instead, overwriting the value that was there previously.", "the initialization program. What is the sum of all values", "the sum of all values left in memory after it", "1. The program then attempts to write the value 11", "don't have the correct decoder chip handy, you can emulate", "handy, you can emulate it in software! The initialization program", "When this program goes to write to memory address 42,", "mask[i] != \"0\": newVal += mask[i] else: newVal += binary36[i]", "def initialize(commands): memory = {} mask = \"X\"*36 for c", "address bit is overwritten with 1. If the bitmask bit", "for i in range(len(bin_address)): if currPos < len(xPositions) and i", "memory address decoder. 
Immediately before a value is written to", "\"\" for i in range(len(mask)): if mask[i] != \"0\": newVal", "return sum \"\"\" --- Part Two --- For some reason,", "are not zero - 101 (at address 7) and 64", "four actual memory addresses are written: 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011", "you discover that the sea port's computer system uses a", "i in range(len(bin_address)): if currPos < len(xPositions) and i ==", "c in commands: if c[0] == \"mask\": mask = c[1]", "still can't communicate with your ferry's docking program. It must", "modifies the corresponding bit of the destination memory address in", "memory address 8. The bitmask is always given as a", "write a value to memory. Values and memory addresses are", "asks for your help again. The computer system that runs", "corresponding bit of the destination memory address in the following", "and the least significant bit (2^0, that is, the 1s", "system that runs this port isn't compatible with the docking", "program (your puzzle input) can either update the bitmask or", "14: Docking Data --- As your ferry approaches the sea", "Two --- For some reason, the sea port's computer system", "xCount = 0 xPositions = [] for i in range(len(bin_address)):", "for i in range(len(bin_address)): # find each X and add", "print(\"Part 2:\", sumAllValuesV2) # binary = decimalToBinary(33323) # binary =", "After a brief inspection, you discover that the sea port's", "(representing 2^35) on the left and the least significant bit", "write to memory address 42, it first applies the bitmask:", "floating bit is not connected to anything and instead fluctuates", "Immediately before a value is written to memory, each bit", "if bin_address[i] == \"X\": xPositions.append(i) # xCount += 1 if", "+= mask[i] sum = 0 for val in memory.values(): sum", "value to memory. 
Values and memory addresses are both 36-bit", "consider the following program: mask = 000000000000000000000000000000X1001X mem[42] = 100", "to write to memory address 42, it first applies the", "is always given as a string of 36 bits, written", "for a version 2 decoder chip. What is the sum", "26) 000000000000000000000000000000011011 (decimal 27) The entire 36-bit address space still", "chip handy, you can emulate it in software! The initialization", "and 64 (at address 8) - producing a sum of", "a bitmask (mask = ....). The mask it specifies will", "\"mask\": mask = c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress =", "bit is overwritten with 1. If the bitmask bit is", "for a in addresses: memory[a] = int(c[1]) sum = 0", "to set. Finally, the program tries to write 0 to", "its initialization program. Although you don't have the correct decoder", "the most significant bit (representing 2^35) on the left and", "xPositions[i] newAddress = \"\" currPos = 0 for i in", "binaryAddress = decimalToBinary(int(address)) binary36 = leadingZeros(36, binaryAddress) newVal = \"\"", "chip. What is the sum of all values left in", "return \"0\"*leadingZeros + bin_num def initialize(commands): memory = {} mask", "completes? \"\"\" def calculateCombinations(bin_address): combinations = [] # xCount =", "Floating bits take on every possible combination of values; with", "memory address 42, it first applies the bitmask: address: 000000000000000000000000000000101010", "a version 2 decoder chip. What is the sum of", "sum data = processData(f) # [print(d) for d in data]", "to values immediately before they are written to memory: a", "port, the captain asks for your help again. The computer", "on the left and the least significant bit (2^0, that", "instead, overwriting the value that was there previously. 
To initialize", "bits, four actual memory addresses are written: 000000000000000000000000000000011010 (decimal 26)", "for i in range(len(mask)): if mask[i] != \"0\": newVal +=", "binary = leadingZeros(36, binary) # print(binary) # combos = initialize_v2([(\"mask\",", "000000000000000000000000000001000000 (decimal 64) 64 is written to address 8 instead,", "floating bits, four actual memory addresses are written: 000000000000000000000000000000011010 (decimal", "mask = 000000000000000000000000000000X1001X mem[42] = 100 mask = 00000000000000000000000000000000X0XX mem[26]", "2:\", sumAllValuesV2) # binary = decimalToBinary(33323) # binary = leadingZeros(36,", "a value to memory. Values and memory addresses are both", "for a moment, a line like mem[8] = 11 would", "memory[address] = \"\" for i in range(len(mask)): if mask[i] ==", "bin_num): leadingZeros = length - len(bin_num) return \"0\"*leadingZeros + bin_num", "again. The computer system that runs this port isn't compatible", "sum = 0 for val in memory.values(): sum += int(\"\".join(val),", "overwrote were already the values the mask tried to set.", "after it completes? \"\"\" def calculateCombinations(bin_address): combinations = [] #", "decoder chip handy, you can emulate it in software! The", "must be using version 2 of the decoder chip! A", "000000000000000000000000000000010011 (decimal 19) 000000000000000000000000000000011000 (decimal 24) 000000000000000000000000000000011001 (decimal 25) 000000000000000000000000000000011010", "the values being written at all. Instead, it acts as", "were already the values the mask tried to set. Finally,", "docking program's memory. After a brief inspection, you discover that", "overwriting the value that was there previously. 
To initialize your", "f: x=x.strip().replace('\\n', '').split(\" = \") data.append((x[0], x[1])) return data #", "bit of the destination memory address in the following way:", "every written value: the 2s bit is overwritten with 0,", "decimalToBinary(int(address)) binary36 = leadingZeros(36, binaryAddress) newVal = \"\" for i", "\"0\"+possible combinations.append(possible) addresses = [] for c in combinations: #", "xCount += 1 if len(xPositions) > 0: for i in", "== \"mask\": mask = c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress", "approaches the sea port, the captain asks for your help", "will overwrite two bits in every written value: the 2s", "(decimal 24) 000000000000000000000000000000011001 (decimal 25) 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal", "In this example, the sum is 208. Execute the initialization", "the sum of all values left in memory after the", "the value 0 at every address, and you still need", "a brief inspection, you discover that the sea port's computer", "four bits are overwritten, three of which are different, and", "the bitmask bit is 0, the corresponding memory address bit", "the program is about to write to memory address 26", "For example, consider the following program: mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8]", "= 000000000000000000000000000000X1001X mem[42] = 100 mask = 00000000000000000000000000000000X0XX mem[26] =", "0 at every address.) In the above example, only two", "1s # w/ leading 0s possible = decimalToBinary(i) while len(possible)", "addresses.append(newAddress) return addresses def initialize_v2(commands): memory = {} mask =", "mask tried to set. Finally, the program tries to write", "open(\"challenges\\data\\day14data.txt\", \"r\") def processData(file): data = [] for x in", "address 8. 
By expanding everything out to individual bits, the", "the corresponding bit of the destination memory address in the", "program goes to write to memory address 42, it first", "program's memory. After a brief inspection, you discover that the", "to a list if bin_address[i] == \"X\": xPositions.append(i) # xCount", "len(possible) < len(xPositions): possible = \"0\"+possible combinations.append(possible) addresses = []", "= [] for i in range(len(bin_address)): # find each X", "Execute the initialization program using an emulator for a version", "binary = decimalToBinary(33323) # binary = leadingZeros(36, binary) # print(binary)", "sum of all values left in memory after the initialization", "bin_address[i] == \"X\": xPositions.append(i) # xCount += 1 if len(xPositions)", "all. Instead, it acts as a memory address decoder. Immediately", "to eight memory addresses: 000000000000000000000000000000010000 (decimal 16) 000000000000000000000000000000010001 (decimal 17)", "would write the value 11 to memory address 8. The", "with 1. If the bitmask bit is X, the corresponding", "on every possible combination of values; with two floating bits,", "73) So, because of the mask, the value 73 is", "bin_num def initialize(commands): memory = {} mask = \"X\"*36 for", "floating. A floating bit is not connected to anything and", "computer system uses a strange bitmask system in its initialization", "all at once! 
For example, consider the following program: mask", "given as a string of 36 bits, written with the", "000000000000000000000000000000011011 (decimal 27) 000000000000000000000000000000111010 (decimal 58) 000000000000000000000000000000111011 (decimal 59) Next,", "c[currPos] currPos += 1 else: newAddress += bin_address[i] addresses.append(newAddress) return", "mem[7] = 101 mem[8] = 0 This program starts by", "+= binary36[i] else: memory[address] += mask[i] sum = 0 for", "version 2 decoder chip doesn't modify the values being written", "0 for val in memory.values(): sum += val # print(memory)", "(decimal 17) 000000000000000000000000000000010010 (decimal 18) 000000000000000000000000000000010011 (decimal 19) 000000000000000000000000000000011000 (decimal", "> 0: for i in range(2**(len(xPositions))): # need to generate", "+= int(\"\".join(val), 2) return sum \"\"\" --- Part Two ---", "X and add its idx to a list if bin_address[i]", "If the bitmask bit is X, the corresponding memory address", "1 overwrites the corresponding bit in the value, while an", "follows: value: 000000000000000000000000000000001011 (decimal 11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001 (decimal", "addresses: 000000000000000000000000000000010000 (decimal 16) 000000000000000000000000000000010001 (decimal 17) 000000000000000000000000000000010010 (decimal 18)", "left in memory at the end of the program. In", "to memory address 42, it first applies the bitmask: address:", "by specifying a bitmask (mask = ....). The mask it", "42, it first applies the bitmask: address: 000000000000000000000000000000101010 (decimal 42)", "& 1s # w/ leading 0s possible = decimalToBinary(i) while", "in range(2**(len(xPositions))): # need to generate all possible combos of", "applied to values immediately before they are written to memory:", "at every address.) In the above example, only two values", "8) - producing a sum of 165. 
Execute the initialization", "the bitmask or write a value to memory. Values and", "attempts to write the value 11 to memory address 8.", "leadingZeros(length, bin_num): leadingZeros = length - len(bin_num) return \"0\"*leadingZeros +", "memory addresses are both 36-bit unsigned integers. For example, ignoring", "is floating. A floating bit is not connected to anything", "in memory at the end of the program. In this", "2 decoder chip doesn't modify the values being written at", "000000000000000000000000000000011010 (decimal 26) mask: 00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX This results in", "the following way: If the bitmask bit is 0, the", "= leadingZeros(36, binaryAddress) newVal = \"\" for i in range(len(mask)):", "binaryValue) memory[address] = \"\" for i in range(len(mask)): if mask[i]", "overwritten with 1. If the bitmask bit is X, the", "need the sum of all values left in memory at", "integers. For example, ignoring bitmasks for a moment, a line", "number # to Binary number def decimalToBinary(n): return bin(n).replace(\"0b\", \"\")", "significant bit (2^0, that is, the 1s bit) on the", "in memory.values(): sum += val # print(memory) return sum data", "(decimal 27) The entire 36-bit address space still begins initialized", "(decimal 25) 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) The entire", "bit is 1, the corresponding memory address bit is overwritten", "begins initialized to the value 0 at every address, and", "docking program, you need the sum of all values left", "values; with two floating bits, four actual memory addresses are", "are floating. Floating bits take on every possible combination of", "some reason, the sea port's computer system still can't communicate", "bin_address[i] addresses.append(newAddress) return addresses def initialize_v2(commands): memory = {} mask", "- producing a sum of 165. 
Execute the initialization program.", "computer system that runs this port isn't compatible with the", "newAddress += bin_address[i] addresses.append(newAddress) return addresses def initialize_v2(commands): memory =", "a in addresses: memory[a] = int(c[1]) sum = 0 for", "print(binary) # combos = initialize_v2([(\"mask\", \"100X100X101011111X100000100X11010011\"), # (\"mem[33323]\", \"349380\")]) #", "000000000000000000000000000001001001 (decimal 73) So, because of the mask, the value", "ignoring bitmasks for a moment, a line like mem[8] =", "64 is written to address 8 instead, overwriting the value", "mask[i] sum = 0 for val in memory.values(): sum +=", "1, the corresponding memory address bit is overwritten with 1.", "sum of all values left in memory after it completes?", "42) mask: 000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X After applying the mask, four", "number # current combo associated idx is in xPositions[i] newAddress", "\"\"\" --- Part Two --- For some reason, the sea", "memory after the initialization program completes. (The entire 36-bit address", "--- For some reason, the sea port's computer system still", "print(\"Part 1:\", sumAllValues) sumAllValuesV2 = initialize_v2(data) print(\"Part 2:\", sumAllValuesV2) #", "(decimal 26) mask: 00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX This results in an", "chip! A version 2 decoder chip doesn't modify the values", "+= c[currPos] currPos += 1 else: newAddress += bin_address[i] addresses.append(newAddress)", "a 0 or 1 overwrites the corresponding bit in the", "port's computer system still can't communicate with your ferry's docking", "and you still need the sum of all values left", "a list if bin_address[i] == \"X\": xPositions.append(i) # xCount +=", "value 0 at every address, and you still need the", "# need to generate all possible combos of 0s &", "being correctly initialized in the docking program's memory. 
After a", "= XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7] = 101 mem[8] =", "101 to address 7: value: 000000000000000000000000000001100101 (decimal 101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X", "idx is in xPositions[i] newAddress = \"\" currPos = 0", "most significant bit (representing 2^35) on the left and the", "= length - len(bin_num) return \"0\"*leadingZeros + bin_num def initialize(commands):", "Next, the program is about to write to memory address", "example, the sum is 208. Execute the initialization program using", "64 (at address 8) - producing a sum of 165.", "update the bitmask or write a value to memory. Values", "in its initialization program. Although you don't have the correct", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101 (decimal 101) This time, the mask has", "following program: mask = 000000000000000000000000000000X1001X mem[42] = 100 mask =", "is the sum of all values left in memory after", "memory address in the following way: If the bitmask bit", "in xPositions[i] newAddress = \"\" currPos = 0 for i", "aren't being correctly initialized in the docking program's memory. After", "line like mem[8] = 11 would write the value 11", "once! For example, consider the following program: mask = 000000000000000000000000000000X1001X", "applies the bitmask: address: 000000000000000000000000000000101010 (decimal 42) mask: 000000000000000000000000000000X1001X result:", "address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress = decimalToBinary(int(address)) binary36 = leadingZeros(36, binaryAddress)", "mem[8] = 0 This program starts by specifying a bitmask", "bit is unchanged. 
If the bitmask bit is 1, the", "will take on all possible values, potentially causing many memory", "d in data] sumAllValues = initialize(data) print(\"Part 1:\", sumAllValues) sumAllValuesV2", "0: for i in range(2**(len(xPositions))): # need to generate all", "\"X\"*36 for c in commands: if c[0] == \"mask\": mask", "mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101 (decimal 101) This time, the mask", "applied as follows: value: 000000000000000000000000000000001011 (decimal 11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result:", "= {} mask = \"X\"*36 for c in commands: if", "else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress = decimalToBinary(int(address)) binary36 = leadingZeros(36,", "= \"\" for i in range(len(mask)): if mask[i] != \"0\":", "insert combination[i] into binary number # current combo associated idx", "the 1s bit) on the right. The current bitmask is", "a sum of 165. Execute the initialization program. What is", "000000000000000000000000000000X1001X mem[42] = 100 mask = 00000000000000000000000000000000X0XX mem[26] = 1", "the end of the program. In this example, the sum", "leadingZeros(36, binary) # print(binary) # combos = initialize_v2([(\"mask\", \"100X100X101011111X100000100X11010011\"), #", "= 101 mem[8] = 0 This program starts by specifying", "to write the value 11 to memory address 8. By", "add its idx to a list if bin_address[i] == \"X\":", "bit is 0, the corresponding memory address bit is unchanged.", "the docking program's memory. 
After a brief inspection, you discover", "(decimal 58) 000000000000000000000000000000111011 (decimal 59) Next, the program is about", "bitmask bit is 1, the corresponding memory address bit is", "00000000000000000000000000000001X0XX This results in an address with three floating bits,", "+= 1 if len(xPositions) > 0: for i in range(2**(len(xPositions))):", "000000000000000000000000000000001011 (decimal 11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001 (decimal 73) So,", "memory addresses to be written all at once! For example,", "either update the bitmask or write a value to memory.", "addresses = calculateCombinations(newVal) for a in addresses: memory[a] = int(c[1])", "= 0 for val in memory.values(): sum += val #", "= open(\"challenges\\data\\day14data.txt\", \"r\") def processData(file): data = [] for x", "(decimal 42) mask: 000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X After applying the mask,", "[] # xCount = 0 xPositions = [] for i", "while len(possible) < len(xPositions): possible = \"0\"+possible combinations.append(possible) addresses =", "the program tries to write 0 to address 8: value:", "all values left in memory after it completes? 
\"\"\" f", "possible = \"0\"+possible combinations.append(possible) addresses = [] for c in", "of 0s & 1s # w/ leading 0s possible =", "= decimalToBinary(int(address)) binary36 = leadingZeros(36, binaryAddress) newVal = \"\" for", "that runs this port isn't compatible with the docking program", "--- Part Two --- For some reason, the sea port's", "in every written value: the 2s bit is overwritten with", "i in range(2**(len(xPositions))): # need to generate all possible combos", "0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000 (decimal 64) 64 is written", "combo associated idx is in xPositions[i] newAddress = \"\" currPos", "= \"\" for i in range(len(mask)): if mask[i] == \"X\":", "above example, only two values in memory are not zero", "the docking parameters aren't being correctly initialized in the docking", "binary36[i] else: memory[address] += mask[i] sum = 0 for val", "26 with a different bitmask: address: 000000000000000000000000000000011010 (decimal 26) mask:", "000000000000000000000000000000111011 (decimal 59) Next, the program is about to write", "bits, written with the most significant bit (representing 2^35) on", "The current bitmask is applied to values immediately before they", "to insert combination[i] into binary number # current combo associated", "Although you don't have the correct decoder chip handy, you", "\"X\": memory[address] += binary36[i] else: memory[address] += mask[i] sum =", "which are different, and two of which are floating. Floating", "1 if len(xPositions) > 0: for i in range(2**(len(xPositions))): #", "to write 101 to address 7: value: 000000000000000000000000000001100101 (decimal 101)", "number def decimalToBinary(n): return bin(n).replace(\"0b\", \"\") def leadingZeros(length, bin_num): leadingZeros", "or write a value to memory. 
Values and memory addresses", "bitmask modifies the corresponding bit of the destination memory address", "newVal += mask[i] else: newVal += binary36[i] addresses = calculateCombinations(newVal)", "in combinations: # need to insert combination[i] into binary number", "memory.values(): sum += int(\"\".join(val), 2) return sum \"\"\" --- Part", "modify the values being written at all. Instead, it acts", "the bitmask bit is 1, the corresponding memory address bit", "0 to address 8: value: 000000000000000000000000000000000000 (decimal 0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X", "on the right. The current bitmask is applied to values", "written at all. Instead, it acts as a memory address", "= 11 would write the value 11 to memory address", "= ....). The mask it specifies will overwrite two bits", "to address 8: value: 000000000000000000000000000000000000 (decimal 0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result:", "completes. (The entire 36-bit address space begins initialized to the", "= c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue = decimalToBinary(int(c[1])) binary36 = leadingZeros(36, binaryValue) memory[address]", "each bit in the bitmask modifies the corresponding bit of", "bit is not connected to anything and instead fluctuates unpredictably.", "program then attempts to write the value 11 to memory", "the decoder chip! A version 2 decoder chip doesn't modify", "1s bit) on the right. The current bitmask is applied", "ferry's docking program. It must be using version 2 of", "write to memory address 26 with a different bitmask: address:", "if currPos < len(xPositions) and i == xPositions[currPos]: newAddress +=", "bit) on the right. The current bitmask is applied to", "uses a strange bitmask system in its initialization program. Although", "= \"0\"+possible combinations.append(possible) addresses = [] for c in combinations:", "in range(len(mask)): if mask[i] == \"X\": memory[address] += binary36[i] else:", "208. 
Execute the initialization program using an emulator for a", "current combo associated idx is in xPositions[i] newAddress = \"\"", "range(len(bin_address)): if currPos < len(xPositions) and i == xPositions[currPos]: newAddress", "you can emulate it in software! The initialization program (your", "combinations.append(possible) addresses = [] for c in combinations: # need", "for i in range(2**(len(xPositions))): # need to generate all possible", "int(c[1]) sum = 0 for val in memory.values(): sum +=", "For example, ignoring bitmasks for a moment, a line like", "0 for val in memory.values(): sum += int(\"\".join(val), 2) return", "to write to memory address 26 with a different bitmask:", "in commands: if c[0] == \"mask\": mask = c[1] else:", "Docking Data --- As your ferry approaches the sea port,", "the value unchanged. For example, consider the following program: mask", "= 0 This program starts by specifying a bitmask (mask", "mask it specifies will overwrite two bits in every written", "a string of 36 bits, written with the most significant", "mask has no effect, as the bits it overwrote were", "emulator for a version 2 decoder chip. What is the", "# Function to convert Decimal number # to Binary number", "bits, causing writes to eight memory addresses: 000000000000000000000000000000010000 (decimal 16)", "was there previously. To initialize your ferry's docking program, you", "the above example, only two values in memory are not", "00000000000000000000000000000000X0XX result: 00000000000000000000000000000001X0XX This results in an address with three", "value 0 at every address.) In the above example, only", "or 1 overwrites the corresponding bit in the value, while", "bit in the value unchanged. For example, consider the following", "X leaves the bit in the value unchanged. 
For example,", "= \"\" currPos = 0 for i in range(len(bin_address)): if", "+= bin_address[i] addresses.append(newAddress) return addresses def initialize_v2(commands): memory = {}", "they are written to memory: a 0 or 1 overwrites", "at every address, and you still need the sum of", "are overwritten, three of which are different, and two of", "need the sum of all values left in memory after", "= 00000000000000000000000000000000X0XX mem[26] = 1 When this program goes to", "result: 000000000000000000000000000000X1101X After applying the mask, four bits are overwritten,", "sum += val # print(memory) return sum data = processData(f)", "a line like mem[8] = 11 would write the value", "port's computer system uses a strange bitmask system in its", "The bitmask is always given as a string of 36", "This program starts by specifying a bitmask (mask = ....).", "1 When this program goes to write to memory address", "x in f: x=x.strip().replace('\\n', '').split(\" = \") data.append((x[0], x[1])) return", "19) 000000000000000000000000000000011000 (decimal 24) 000000000000000000000000000000011001 (decimal 25) 000000000000000000000000000000011010 (decimal 26)", "data = processData(f) # [print(d) for d in data] sumAllValues", "values left in memory at the end of the program.", "= c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryAddress = decimalToBinary(int(address)) binary36 = leadingZeros(36, binaryAddress) newVal", "address 8 instead. Then, the program tries to write 101", "possible combination of values; with two floating bits, four actual", "the program tries to write 101 to address 7: value:", "unchanged. For example, consider the following program: mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X", "can either update the bitmask or write a value to", "binaryValue = decimalToBinary(int(c[1])) binary36 = leadingZeros(36, binaryValue) memory[address] = \"\"", "starts by specifying a bitmask (mask = ....). The mask", "many memory addresses to be written all at once! 
For", "is applied as follows: value: 000000000000000000000000000000001011 (decimal 11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X", "(decimal 59) Next, the program is about to write to", "is X, the corresponding memory address bit is floating. A", "an address with three floating bits, causing writes to eight", "memory at the end of the program. In this example,", "data.append((x[0], x[1])) return data # Function to convert Decimal number", "25) 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) The entire 36-bit", "all values left in memory at the end of the", "you don't have the correct decoder chip handy, you can", "into binary number # current combo associated idx is in", "to memory address 8. By expanding everything out to individual", "consider the following program: mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11", "address bit is floating. A floating bit is not connected", "floating. Floating bits take on every possible combination of values;", "000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) 000000000000000000000000000000111010 (decimal 58) 000000000000000000000000000000111011", "X, the corresponding memory address bit is floating. 
A floating", "initialized to the value 0 at every address, and you", "= decimalToBinary(33323) # binary = leadingZeros(36, binary) # print(binary) #", "all possible combos of 0s & 1s # w/ leading", "values in memory are not zero - 101 (at address", "applying the mask, four bits are overwritten, three of which", "of 36 bits, written with the most significant bit (representing", "def initialize_v2(commands): memory = {} mask = \"X\"*36 for c", "The mask it specifies will overwrite two bits in every", "the mask, the value 73 is written to memory address", "in the bitmask modifies the corresponding bit of the destination", "all possible values, potentially causing many memory addresses to be", "mem[26] = 1 When this program goes to write to", "out to individual bits, the mask is applied as follows:", "causing many memory addresses to be written all at once!", "program using an emulator for a version 2 decoder chip.", "address in the following way: If the bitmask bit is", "you need the sum of all values left in memory", "# to Binary number def decimalToBinary(n): return bin(n).replace(\"0b\", \"\") def", "- 101 (at address 7) and 64 (at address 8)", "emulate it in software! The initialization program (your puzzle input)", "is written to address 8 instead, overwriting the value that", "int(\"\".join(val), 2) return sum \"\"\" --- Part Two --- For", "for val in memory.values(): sum += val # print(memory) return", "captain asks for your help again. The computer system that", "mask = \"X\"*36 for c in commands: if c[0] ==", "binary number # current combo associated idx is in xPositions[i]", "program tries to write 101 to address 7: value: 000000000000000000000000000001100101", "it in software! The initialization program (your puzzle input) can", "The program then attempts to write the value 11 to", "individual bits, the mask is applied as follows: value: 000000000000000000000000000000001011", "the values the mask tried to set. 
Finally, the program", "64s bit is overwritten with 1. The program then attempts", "To initialize your ferry's docking program, you need the sum", "it acts as a memory address decoder. Immediately before a", "decimalToBinary(i) while len(possible) < len(xPositions): possible = \"0\"+possible combinations.append(possible) addresses", "000000000000000000000000000000111010 (decimal 58) 000000000000000000000000000000111011 (decimal 59) Next, the program is", "addresses def initialize_v2(commands): memory = {} mask = \"X\"*36 for", "commands: if c[0] == \"mask\": mask = c[1] else: address", "the floating bits will take on all possible values, potentially", "end of the program. In this example, the sum is", "\") data.append((x[0], x[1])) return data # Function to convert Decimal", "after the initialization program completes. (The entire 36-bit address space", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000 (decimal 64) 64 is written to address", "sea port, the captain asks for your help again. The", "= calculateCombinations(newVal) for a in addresses: memory[a] = int(c[1]) sum", "< len(xPositions) and i == xPositions[currPos]: newAddress += c[currPos] currPos", "initialized in the docking program's memory. After a brief inspection,", "= [] for x in f: x=x.strip().replace('\\n', '').split(\" = \")", "= c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue = decimalToBinary(int(c[1])) binary36", "result: 000000000000000000000000000001100101 (decimal 101) This time, the mask has no", "there previously. To initialize your ferry's docking program, you need", "27) 000000000000000000000000000000111010 (decimal 58) 000000000000000000000000000000111011 (decimal 59) Next, the program", "A version 2 decoder chip doesn't modify the values being", "this port isn't compatible with the docking program on the", "results in an address with three floating bits, causing writes", "value that was there previously. 
To initialize your ferry's docking", "in f: x=x.strip().replace('\\n', '').split(\" = \") data.append((x[0], x[1])) return data", "58) 000000000000000000000000000000111011 (decimal 59) Next, the program is about to", "11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001 (decimal 73) So, because of", "and the 64s bit is overwritten with 1. The program", "way: If the bitmask bit is 0, the corresponding memory", "write 101 to address 7: value: 000000000000000000000000000001100101 (decimal 101) mask:", "mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7] = 101 mem[8]", "mem[8] = 11 would write the value 11 to memory", "bits it overwrote were already the values the mask tried", "memory address bit is floating. A floating bit is not", "sum of all values left in memory at the end", "000000000000000000000000000000000000 (decimal 0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000 (decimal 64) 64", "addresses are written: 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) 000000000000000000000000000000111010", "[] for c in combinations: # need to insert combination[i]", "to generate all possible combos of 0s & 1s #", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7] = 101 mem[8] = 0", "Then, the program tries to write 101 to address 7:", "space still begins initialized to the value 0 at every", "\"X\": xPositions.append(i) # xCount += 1 if len(xPositions) > 0:", "ferry, so the docking parameters aren't being correctly initialized in", "sum of 165. Execute the initialization program. What is the", "can't communicate with your ferry's docking program. It must be", "the destination memory address in the following way: If the", "its idx to a list if bin_address[i] == \"X\": xPositions.append(i)", "system still can't communicate with your ferry's docking program. 
It", "bit (representing 2^35) on the left and the least significant", "memory addresses are written: 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27)", "(your puzzle input) can either update the bitmask or write", "in the following way: If the bitmask bit is 0,", "to address 7: value: 000000000000000000000000000001100101 (decimal 101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result:", "to the value 0 at every address, and you still", "the sum of all values left in memory at the", "# need to insert combination[i] into binary number # current", "0 This program starts by specifying a bitmask (mask =", "What is the sum of all values left in memory", "unpredictably. In practice, this means the floating bits will take", "program. Although you don't have the correct decoder chip handy,", "memory after it completes? \"\"\" def calculateCombinations(bin_address): combinations = []", "def decimalToBinary(n): return bin(n).replace(\"0b\", \"\") def leadingZeros(length, bin_num): leadingZeros =", "is applied to values immediately before they are written to", "\"\"\" def calculateCombinations(bin_address): combinations = [] # xCount = 0", "as a memory address decoder. Immediately before a value is", "8. By expanding everything out to individual bits, the mask", "For some reason, the sea port's computer system still can't", "a strange bitmask system in its initialization program. Although you", "range(len(mask)): if mask[i] == \"X\": memory[address] += binary36[i] else: memory[address]", "So, because of the mask, the value 73 is written", "000000000000000000000000000000101010 (decimal 42) mask: 000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X After applying the", "the right. 
The current bitmask is applied to values immediately", "associated idx is in xPositions[i] newAddress = \"\" currPos =", "After applying the mask, four bits are overwritten, three of", "000000000000000000000000000000011000 (decimal 24) 000000000000000000000000000000011001 (decimal 25) 000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011", "of the decoder chip! A version 2 decoder chip doesn't", "the value that was there previously. To initialize your ferry's", "written all at once! For example, consider the following program:", "this program goes to write to memory address 42, it", "because of the mask, the value 73 is written to", "8 instead, overwriting the value that was there previously. To", "return data # Function to convert Decimal number # to", "address: 000000000000000000000000000000101010 (decimal 42) mask: 000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X After applying", "data] sumAllValues = initialize(data) print(\"Part 1:\", sumAllValues) sumAllValuesV2 = initialize_v2(data)", "then attempts to write the value 11 to memory address", "+= mask[i] else: newVal += binary36[i] addresses = calculateCombinations(newVal) for", "program. In this example, the sum is 208. Execute the", "combos of 0s & 1s # w/ leading 0s possible", "the captain asks for your help again. The computer system", "example, consider the following program: mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] =", "memory address 8. 
By expanding everything out to individual bits,", "with the most significant bit (representing 2^35) on the left", "to memory, each bit in the bitmask modifies the corresponding", "- len(bin_num) return \"0\"*leadingZeros + bin_num def initialize(commands): memory =", "of values; with two floating bits, four actual memory addresses", "101 (at address 7) and 64 (at address 8) -", "the following program: mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X mem[8] = 11 mem[7]", "2s bit is overwritten with 0, and the 64s bit", "For example, consider the following program: mask = 000000000000000000000000000000X1001X mem[42]", "7) and 64 (at address 8) - producing a sum", "chip doesn't modify the values being written at all. Instead,", "(decimal 16) 000000000000000000000000000000010001 (decimal 17) 000000000000000000000000000000010010 (decimal 18) 000000000000000000000000000000010011 (decimal", "0 for i in range(len(bin_address)): if currPos < len(xPositions) and", "73 is written to memory address 8 instead. Then, the", "memory[a] = int(c[1]) sum = 0 for val in memory.values():", "the 64s bit is overwritten with 1. The program then", "value 73 is written to memory address 8 instead. Then,", "decoder chip! A version 2 decoder chip doesn't modify the", "specifies will overwrite two bits in every written value: the", "to memory address 26 with a different bitmask: address: 000000000000000000000000000000011010", "if c[0] == \"mask\": mask = c[1] else: address =", "initialize_v2(commands): memory = {} mask = \"X\"*36 for c in", "two bits in every written value: the 2s bit is", "memory address 26 with a different bitmask: address: 000000000000000000000000000000011010 (decimal", "in range(len(bin_address)): if currPos < len(xPositions) and i == xPositions[currPos]:", "it completes? 
\"\"\" def calculateCombinations(bin_address): combinations = [] # xCount", "the sea port, the captain asks for your help again.", "written to address 8 instead, overwriting the value that was", "with three floating bits, causing writes to eight memory addresses:", "puzzle input) can either update the bitmask or write a", "two floating bits, four actual memory addresses are written: 000000000000000000000000000000011010", "tries to write 101 to address 7: value: 000000000000000000000000000001100101 (decimal", "11 to memory address 8. The bitmask is always given", "memory = {} mask = \"X\"*36 for c in commands:", "Part Two --- For some reason, the sea port's computer", "000000000000000000000000000000X1101X After applying the mask, four bits are overwritten, three", "zero - 101 (at address 7) and 64 (at address", "unchanged. If the bitmask bit is 1, the corresponding memory", "= 0 for val in memory.values(): sum += int(\"\".join(val), 2)", "as the bits it overwrote were already the values the", "initialization program completes. (The entire 36-bit address space begins initialized", "addresses = [] for c in combinations: # need to", "len(xPositions) and i == xPositions[currPos]: newAddress += c[currPos] currPos +=", "mem[8] = 11 mem[7] = 101 mem[8] = 0 This", "least significant bit (2^0, that is, the 1s bit) on", "c[0] == \"mask\": mask = c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1]", "floating bits will take on all possible values, potentially causing", "c in combinations: # need to insert combination[i] into binary", "addresses: memory[a] = int(c[1]) sum = 0 for val in", "to memory address 8. The bitmask is always given as", "165. Execute the initialization program. 
What is the sum of", "memory[address] += binary36[i] else: memory[address] += mask[i] sum = 0", "0s possible = decimalToBinary(i) while len(possible) < len(xPositions): possible =", "= 100 mask = 00000000000000000000000000000000X0XX mem[26] = 1 When this", "is not connected to anything and instead fluctuates unpredictably. In", "processData(f) # [print(d) for d in data] sumAllValues = initialize(data)", "In the above example, only two values in memory are", "bit is floating. A floating bit is not connected to", "every address, and you still need the sum of all", "000000000000000000000000000000011010 (decimal 26) 000000000000000000000000000000011011 (decimal 27) The entire 36-bit address", "!= \"0\": newVal += mask[i] else: newVal += binary36[i] addresses", "c[1] else: address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue = decimalToBinary(int(c[1])) binary36 =", "\"\") def leadingZeros(length, bin_num): leadingZeros = length - len(bin_num) return", "at the end of the program. In this example, the", "--- As your ferry approaches the sea port, the captain", "address = c[0][c[0].index(\"[\")+1:len(c[0])-1] binaryValue = decimalToBinary(int(c[1])) binary36 = leadingZeros(36, binaryValue)", "written with the most significant bit (representing 2^35) on the", "the value 0 at every address.) In the above example,", "is unchanged. If the bitmask bit is 1, the corresponding", "is overwritten with 1. The program then attempts to write", "while an X leaves the bit in the value unchanged.", "x[1])) return data # Function to convert Decimal number #", "sumAllValues) sumAllValuesV2 = initialize_v2(data) print(\"Part 2:\", sumAllValuesV2) # binary =", "2^35) on the left and the least significant bit (2^0,", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001 (decimal 73) So, because of the mask,", "(at address 7) and 64 (at address 8) - producing", "64) 64 is written to address 8 instead, overwriting the", "2 of the decoder chip! 
A version 2 decoder chip", "the 2s bit is overwritten with 0, and the 64s", "the mask tried to set. Finally, the program tries to", "address 8. The bitmask is always given as a string", "<reponame>Jeffreyo3/AdventOfCode2020 \"\"\" --- Day 14: Docking Data --- As your", "program, you need the sum of all values left in", "decoder chip doesn't modify the values being written at all.", "< len(xPositions): possible = \"0\"+possible combinations.append(possible) addresses = [] for", "previously. To initialize your ferry's docking program, you need the", "bitmask (mask = ....). The mask it specifies will overwrite", "value 11 to memory address 8. By expanding everything out", "docking program. It must be using version 2 of the", "written to memory: a 0 or 1 overwrites the corresponding", "value: 000000000000000000000000000001100101 (decimal 101) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001100101 (decimal 101)", "+ bin_num def initialize(commands): memory = {} mask = \"X\"*36", "memory, each bit in the bitmask modifies the corresponding bit", "which are floating. Floating bits take on every possible combination", "calculateCombinations(bin_address): combinations = [] # xCount = 0 xPositions =", "being written at all. Instead, it acts as a memory", "2 decoder chip. What is the sum of all values", "begins initialized to the value 0 at every address.) In", "the least significant bit (2^0, that is, the 1s bit)", "bit is overwritten with 0, and the 64s bit is", "means the floating bits will take on all possible values,", "26) 000000000000000000000000000000011011 (decimal 27) 000000000000000000000000000000111010 (decimal 58) 000000000000000000000000000000111011 (decimal 59)", "address.) In the above example, only two values in memory", "= leadingZeros(36, binaryValue) memory[address] = \"\" for i in range(len(mask)):", "at all. 
Instead, it acts as a memory address decoder.", "8: value: 000000000000000000000000000000000000 (decimal 0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000 (decimal", "is 0, the corresponding memory address bit is unchanged. If", "the corresponding memory address bit is floating. A floating bit", "about to write to memory address 26 with a different", "correct decoder chip handy, you can emulate it in software!", "to be written all at once! For example, consider the", "corresponding memory address bit is overwritten with 1. If the", "be using version 2 of the decoder chip! A version", "bitmask: address: 000000000000000000000000000000101010 (decimal 42) mask: 000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X After", "is about to write to memory address 26 with a", "in memory after it completes? \"\"\" def calculateCombinations(bin_address): combinations =", "isn't compatible with the docking program on the ferry, so", "docking program on the ferry, so the docking parameters aren't", "tries to write 0 to address 8: value: 000000000000000000000000000000000000 (decimal", "\"\"\" --- Day 14: Docking Data --- As your ferry", "right. The current bitmask is applied to values immediately before", "sum += int(\"\".join(val), 2) return sum \"\"\" --- Part Two", "floating bits, causing writes to eight memory addresses: 000000000000000000000000000000010000 (decimal", "written to memory, each bit in the bitmask modifies the", "it first applies the bitmask: address: 000000000000000000000000000000101010 (decimal 42) mask:", "16) 000000000000000000000000000000010001 (decimal 17) 000000000000000000000000000000010010 (decimal 18) 000000000000000000000000000000010011 (decimal 19)", "0 at every address, and you still need the sum", "immediately before they are written to memory: a 0 or", "write the value 11 to memory address 8. 
By expanding", "36-bit address space begins initialized to the value 0 at", "combinations: # need to insert combination[i] into binary number #", "moment, a line like mem[8] = 11 would write the", "data # Function to convert Decimal number # to Binary", "for c in commands: if c[0] == \"mask\": mask =", "to address 8 instead, overwriting the value that was there", "result: 000000000000000000000000000001001001 (decimal 73) So, because of the mask, the", "with your ferry's docking program. It must be using version", "Execute the initialization program. What is the sum of all", "system uses a strange bitmask system in its initialization program.", "mask, four bits are overwritten, three of which are different,", "the program. In this example, the sum is 208. Execute", "calculateCombinations(newVal) for a in addresses: memory[a] = int(c[1]) sum =", "Values and memory addresses are both 36-bit unsigned integers. For", "\"\" currPos = 0 for i in range(len(bin_address)): if currPos", "sumAllValuesV2) # binary = decimalToBinary(33323) # binary = leadingZeros(36, binary)", "bit is overwritten with 1. The program then attempts to", "no effect, as the bits it overwrote were already the", "in memory after the initialization program completes. (The entire 36-bit", "[] for x in f: x=x.strip().replace('\\n', '').split(\" = \") data.append((x[0],", "It must be using version 2 of the decoder chip!", "that the sea port's computer system uses a strange bitmask", "binary36[i] addresses = calculateCombinations(newVal) for a in addresses: memory[a] =", "8 instead. Then, the program tries to write 101 to", "on all possible values, potentially causing many memory addresses to", "it specifies will overwrite two bits in every written value:", "xPositions = [] for i in range(len(bin_address)): # find each", "values the mask tried to set. Finally, the program tries", "leadingZeros = length - len(bin_num) return \"0\"*leadingZeros + bin_num def", "left in memory after it completes? 
\"\"\" f = open(\"challenges\\data\\day14data.txt\",", "leadingZeros(36, binaryAddress) newVal = \"\" for i in range(len(mask)): if", "# print(binary) # combos = initialize_v2([(\"mask\", \"100X100X101011111X100000100X11010011\"), # (\"mem[33323]\", \"349380\")])", "sea port's computer system uses a strange bitmask system in", "the correct decoder chip handy, you can emulate it in", "000000000000000000000000000001100101 (decimal 101) This time, the mask has no effect,", "value 11 to memory address 8. The bitmask is always", "the ferry, so the docking parameters aren't being correctly initialized", "value: 000000000000000000000000000000001011 (decimal 11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001 (decimal 73)", "11 mem[7] = 101 mem[8] = 0 This program starts", "of the destination memory address in the following way: If", "newAddress += c[currPos] currPos += 1 else: newAddress += bin_address[i]", "def processData(file): data = [] for x in f: x=x.strip().replace('\\n',", "string of 36 bits, written with the most significant bit", "already the values the mask tried to set. Finally, the", "to individual bits, the mask is applied as follows: value:", "is written to memory, each bit in the bitmask modifies", "newVal += binary36[i] addresses = calculateCombinations(newVal) for a in addresses:", "0 or 1 overwrites the corresponding bit in the value,", "of all values left in memory after it completes? \"\"\"", "= \"X\"*36 for c in commands: if c[0] == \"mask\":", "docking parameters aren't being correctly initialized in the docking program's", "# xCount = 0 xPositions = [] for i in", "bits are overwritten, three of which are different, and two", "else: memory[address] += mask[i] sum = 0 for val in", "sum is 208. 
Execute the initialization program using an emulator", "need to insert combination[i] into binary number # current combo", "== xPositions[currPos]: newAddress += c[currPos] currPos += 1 else: newAddress", "+= val # print(memory) return sum data = processData(f) #", "This results in an address with three floating bits, causing", "help again. The computer system that runs this port isn't", "mask: 000000000000000000000000000000X1001X result: 000000000000000000000000000000X1101X After applying the mask, four bits", "len(xPositions): possible = \"0\"+possible combinations.append(possible) addresses = [] for c", "bitmask or write a value to memory. Values and memory", "range(len(mask)): if mask[i] != \"0\": newVal += mask[i] else: newVal", "(decimal 0) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001000000 (decimal 64) 64 is", "address space begins initialized to the value 0 at every", "of which are floating. Floating bits take on every possible", "bits take on every possible combination of values; with two", "in the value, while an X leaves the bit in", "2) return sum \"\"\" --- Part Two --- For some", "a different bitmask: address: 000000000000000000000000000000011010 (decimal 26) mask: 00000000000000000000000000000000X0XX result:", "bits, the mask is applied as follows: value: 000000000000000000000000000000001011 (decimal", "the sea port's computer system uses a strange bitmask system", "reason, the sea port's computer system still can't communicate with", "using version 2 of the decoder chip! A version 2", "memory address bit is overwritten with 1. If the bitmask", "mask = 00000000000000000000000000000000X0XX mem[26] = 1 When this program goes", "program. It must be using version 2 of the decoder", "your ferry's docking program. 
It must be using version 2", "value is written to memory, each bit in the bitmask", "000000000000000000000000000000010000 (decimal 16) 000000000000000000000000000000010001 (decimal 17) 000000000000000000000000000000010010 (decimal 18) 000000000000000000000000000000010011", "sum \"\"\" --- Part Two --- For some reason, the", "for i in range(len(mask)): if mask[i] == \"X\": memory[address] +=", "000000000000000000000000000000010010 (decimal 18) 000000000000000000000000000000010011 (decimal 19) 000000000000000000000000000000011000 (decimal 24) 000000000000000000000000000000011001", "memory address bit is unchanged. If the bitmask bit is", "sumAllValuesV2 = initialize_v2(data) print(\"Part 2:\", sumAllValuesV2) # binary = decimalToBinary(33323)", "space begins initialized to the value 0 at every address.)", "# find each X and add its idx to a", "and add its idx to a list if bin_address[i] ==", "(decimal 11) mask: XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X result: 000000000000000000000000000001001001 (decimal 73) So, because", "mem[42] = 100 mask = 00000000000000000000000000000000X0XX mem[26] = 1 When", "is 208. Execute the initialization program using an emulator for", "compatible with the docking program on the ferry, so the", "a value is written to memory, each bit in the" ]
[ "definition if self.free_point: item = turn_into_free_point(item, self.scene) self.scene.project_data.add(item) A =", "self.scene = scene self.sides = 3 self.free_point = False self.data", "self).__init__() self.ui = uic.loadUi('regularpolygon.ui', self) self.scene = scene self.sides =", "def hslider_sides_func(self, value): \"\"\"Be slider callback function to set sides.\"\"\"", "function to set sides.\"\"\" self.sides = value self.ui.sides_spin.setValue(value) def accepted(self):", "item.item[\"definition\"] = definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old = self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget,", "self.sides polygon = [A, B] for _ in range(self.sides -", "= {'A': A, 'B': B, 'angle': angle} id_ = Factory.next_id(item,", "x: free_point_checkbox(self, x)) def hslider_sides_func(self, value): \"\"\"Be slider callback function", "= definition if self.free_point: item = turn_into_free_point(item, self.scene) self.scene.project_data.add(item) A", "self.scene.project_data.add(item) A = B B = item.item[\"id\"] polygon.append(item.item[\"id\"]) item =", "self.ui = uic.loadUi('regularpolygon.ui', self) self.scene = scene self.sides = 3", "self.sides = 3 self.free_point = False self.data = data self.ui.buttonBox.accepted.connect(self.accepted)", "definition = {'A': A, 'B': B, 'angle': angle} id_ =", "None) definition = polygon item.item[\"id\"] = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"definition\"]", "self.scene.project_data.items) item.item[\"id\"] = id_ item.item[\"definition\"] = definition if self.free_point: item", "A, 'B': B, 'angle': angle} id_ = Factory.next_id(item, definition, self.scene.project_data.items)", "Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget 
import", "A, B = self.data angle = -(self.sides - 2) *", "accepted(self): \"\"\"Create new regular polygon with settings.\"\"\" A, B =", "Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"id\"] = id_ item.item[\"definition\"] = definition if", "polygon with settings.\"\"\" A, B = self.data angle = -(self.sides", "PyQt5 import QtWidgets, uic from Factory import Factory from Dialogs.DialogMacros", "= turn_into_free_point(item, self.scene) self.scene.project_data.add(item) A = B B = item.item[\"id\"]", "\"\"\"Be slider callback function to set sides.\"\"\" self.sides = value", "Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"definition\"] = definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old =", "data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x)) def hslider_sides_func(self,", "= -(self.sides - 2) * 180 / self.sides polygon =", "False self.data = data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self,", "def accepted(self): \"\"\"Create new regular polygon with settings.\"\"\" A, B", "import Constant as c class RegularPolygonDialog(QtWidgets.QDialog): def __init__(self, scene, data):", "self.free_point: item = turn_into_free_point(item, self.scene) self.scene.project_data.add(item) A = B B", "self.scene) self.scene.project_data.add(item) A = B B = item.item[\"id\"] polygon.append(item.item[\"id\"]) item", "turn_into_free_point, free_point_checkbox from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget import Constant as", 
"for _ in range(self.sides - 2): item = Factory.create_empty_item('point', c.Point.Definition.ROTATION)", "item = Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition = {'A': A, 'B': B,", "sides.\"\"\" self.sides = value self.ui.sides_spin.setValue(value) def accepted(self): \"\"\"Create new regular", "= [A, B] for _ in range(self.sides - 2): item", "free_point_checkbox(self, x)) def hslider_sides_func(self, value): \"\"\"Be slider callback function to", "[A, B] for _ in range(self.sides - 2): item =", "'angle': angle} id_ = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"id\"] = id_", "= item.item[\"id\"] polygon.append(item.item[\"id\"]) item = Factory.create_empty_item('polygon', None) definition = polygon", "QtWidgets, uic from Factory import Factory from Dialogs.DialogMacros import turn_into_free_point,", "class RegularPolygonDialog(QtWidgets.QDialog): def __init__(self, scene, data): \"\"\"Construct RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, self).__init__()", "item = turn_into_free_point(item, self.scene) self.scene.project_data.add(item) A = B B =", "self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x)) def hslider_sides_func(self, value): \"\"\"Be", "self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene) def rejected(self): \"\"\"Add", "Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition = {'A': A, 'B': B, 'angle': angle}", "if self.free_point: item = turn_into_free_point(item, self.scene) self.scene.project_data.add(item) A = B", "B, 'angle': angle} id_ = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"id\"] =", 
"Factory from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox from Fill.ListWidget import fill_listWidget_with_data,", "\"\"\"Construct RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, self).__init__() self.ui = uic.loadUi('regularpolygon.ui', self) self.scene =", "B = item.item[\"id\"] polygon.append(item.item[\"id\"]) item = Factory.create_empty_item('polygon', None) definition =", "= False self.data = data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x:", "c.Point.Definition.ROTATION) definition = {'A': A, 'B': B, 'angle': angle} id_", "item.item[\"definition\"] = definition if self.free_point: item = turn_into_free_point(item, self.scene) self.scene.project_data.add(item)", "c class RegularPolygonDialog(QtWidgets.QDialog): def __init__(self, scene, data): \"\"\"Construct RegularPolygonDialog.\"\"\" super(RegularPolygonDialog,", "B = self.data angle = -(self.sides - 2) * 180", "'B': B, 'angle': angle} id_ = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"id\"]", "item = Factory.create_empty_item('polygon', None) definition = polygon item.item[\"id\"] = Factory.next_id(item,", "= data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x)) def", "= scene self.sides = 3 self.free_point = False self.data =", "callback function to set sides.\"\"\" self.sides = value self.ui.sides_spin.setValue(value) def", "= Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"definition\"] = definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old", "data): \"\"\"Construct 
RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, self).__init__() self.ui = uic.loadUi('regularpolygon.ui', self) self.scene", "current_row_old) self.scene.edit.add_undo_item(self.scene) def rejected(self): \"\"\"Add no new regular polygon.\"\"\" pass", "set sides.\"\"\" self.sides = value self.ui.sides_spin.setValue(value) def accepted(self): \"\"\"Create new", "item.item[\"id\"] polygon.append(item.item[\"id\"]) item = Factory.create_empty_item('polygon', None) definition = polygon item.item[\"id\"]", "set_selected_id_in_listWidget import Constant as c class RegularPolygonDialog(QtWidgets.QDialog): def __init__(self, scene,", "self.scene.project_data.items) item.item[\"definition\"] = definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old = self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data,", "range(self.sides - 2): item = Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition = {'A':", "= self.data angle = -(self.sides - 2) * 180 /", "slider callback function to set sides.\"\"\" self.sides = value self.ui.sides_spin.setValue(value)", "definition, self.scene.project_data.items) item.item[\"id\"] = id_ item.item[\"definition\"] = definition if self.free_point:", "= id_ item.item[\"definition\"] = definition if self.free_point: item = turn_into_free_point(item,", "definition = polygon item.item[\"id\"] = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"definition\"] =", "self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene) def rejected(self): \"\"\"Add no new", "= uic.loadUi('regularpolygon.ui', self) self.scene = scene self.sides = 3 self.free_point", "in range(self.sides - 2): item = Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition =", "polygon item.item[\"id\"] = 
Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"definition\"] = definition self.scene.project_data.add(item)", "item.item[\"id\"] = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"definition\"] = definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims)", "value): \"\"\"Be slider callback function to set sides.\"\"\" self.sides =", "_ in range(self.sides - 2): item = Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition", "polygon.append(item.item[\"id\"]) item = Factory.create_empty_item('polygon', None) definition = polygon item.item[\"id\"] =", "self.data angle = -(self.sides - 2) * 180 / self.sides", "self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x)) def hslider_sides_func(self, value): \"\"\"Be slider", "regular polygon with settings.\"\"\" A, B = self.data angle =", "from PyQt5 import QtWidgets, uic from Factory import Factory from", "self) self.scene = scene self.sides = 3 self.free_point = False", "2) * 180 / self.sides polygon = [A, B] for", "= polygon item.item[\"id\"] = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"definition\"] = definition", "RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, self).__init__() self.ui = uic.loadUi('regularpolygon.ui', self) self.scene = scene", "self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old = self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old)", "Constant as c class RegularPolygonDialog(QtWidgets.QDialog): def __init__(self, scene, data): \"\"\"Construct", "self.sides = value self.ui.sides_spin.setValue(value) def accepted(self): 
\"\"\"Create new regular polygon", "A = B B = item.item[\"id\"] polygon.append(item.item[\"id\"]) item = Factory.create_empty_item('polygon',", "B B = item.item[\"id\"] polygon.append(item.item[\"id\"]) item = Factory.create_empty_item('polygon', None) definition", "import Factory from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox from Fill.ListWidget import", "self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x)) def hslider_sides_func(self, value):", "scene, data): \"\"\"Construct RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, self).__init__() self.ui = uic.loadUi('regularpolygon.ui', self)", "Factory.create_empty_item('polygon', None) definition = polygon item.item[\"id\"] = Factory.next_id(item, definition, self.scene.project_data.items)", "from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget", "self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x)) def hslider_sides_func(self, value): \"\"\"Be slider callback", "\"\"\"Create new regular polygon with settings.\"\"\" A, B = self.data", "def __init__(self, scene, data): \"\"\"Construct RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, self).__init__() self.ui =", "as c class RegularPolygonDialog(QtWidgets.QDialog): def __init__(self, scene, data): \"\"\"Construct RegularPolygonDialog.\"\"\"", "3 self.free_point = False self.data = data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func)", "self.free_point = False self.data = data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) 
self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda", "self.ui.sides_spin.setValue(value) def accepted(self): \"\"\"Create new regular polygon with settings.\"\"\" A,", "angle} id_ = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"id\"] = id_ item.item[\"definition\"]", "= definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old = self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx)", "scene self.sides = 3 self.free_point = False self.data = data", "2): item = Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition = {'A': A, 'B':", "180 / self.sides polygon = [A, B] for _ in", "id_ item.item[\"definition\"] = definition if self.free_point: item = turn_into_free_point(item, self.scene)", "Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget import Constant as c class RegularPolygonDialog(QtWidgets.QDialog):", "= 3 self.free_point = False self.data = data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected)", "x)) def hslider_sides_func(self, value): \"\"\"Be slider callback function to set", "item.item[\"id\"] = id_ item.item[\"definition\"] = definition if self.free_point: item =", "free_point_checkbox from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget import Constant as c", "= Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition = {'A': A, 'B': B, 'angle':", "import QtWidgets, uic from Factory import Factory from Dialogs.DialogMacros import", "= Factory.create_empty_item('polygon', None) definition = polygon item.item[\"id\"] = Factory.next_id(item, definition,", "new regular polygon with settings.\"\"\" A, B = self.data angle", "turn_into_free_point(item, self.scene) 
self.scene.project_data.add(item) A = B B = item.item[\"id\"] polygon.append(item.item[\"id\"])", "with settings.\"\"\" A, B = self.data angle = -(self.sides -", "= self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene) def rejected(self):", "fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene) def rejected(self): \"\"\"Add no", "from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget import Constant as c class", "to set sides.\"\"\" self.sides = value self.ui.sides_spin.setValue(value) def accepted(self): \"\"\"Create", "self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old = self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene)", "super(RegularPolygonDialog, self).__init__() self.ui = uic.loadUi('regularpolygon.ui', self) self.scene = scene self.sides", "/ self.sides polygon = [A, B] for _ in range(self.sides", "{'A': A, 'B': B, 'angle': angle} id_ = Factory.next_id(item, definition,", "set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene) def rejected(self): \"\"\"Add no new regular polygon.\"\"\"", "-(self.sides - 2) * 180 / self.sides polygon = [A,", "import turn_into_free_point, free_point_checkbox from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget import Constant", "import fill_listWidget_with_data, set_selected_id_in_listWidget import Constant as c class RegularPolygonDialog(QtWidgets.QDialog): 
def", "uic from Factory import Factory from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox", "= value self.ui.sides_spin.setValue(value) def accepted(self): \"\"\"Create new regular polygon with", "hslider_sides_func(self, value): \"\"\"Be slider callback function to set sides.\"\"\" self.sides", "- 2) * 180 / self.sides polygon = [A, B]", "settings.\"\"\" A, B = self.data angle = -(self.sides - 2)", "self.data = data self.ui.buttonBox.accepted.connect(self.accepted) self.ui.buttonBox.rejected.connect(self.rejected) self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func) self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x))", "current_row_old = self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene) def", "* 180 / self.sides polygon = [A, B] for _", "angle = -(self.sides - 2) * 180 / self.sides polygon", "= B B = item.item[\"id\"] polygon.append(item.item[\"id\"]) item = Factory.create_empty_item('polygon', None)", "from Factory import Factory from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox from", "uic.loadUi('regularpolygon.ui', self) self.scene = scene self.sides = 3 self.free_point =", "__init__(self, scene, data): \"\"\"Construct RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, self).__init__() self.ui = uic.loadUi('regularpolygon.ui',", "Factory import Factory from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox from Fill.ListWidget", "- 2): item = Factory.create_empty_item('point', c.Point.Definition.ROTATION) definition = {'A': A,", "B] for _ in range(self.sides - 2): item = Factory.create_empty_item('point',", "RegularPolygonDialog(QtWidgets.QDialog): def __init__(self, scene, data): \"\"\"Construct RegularPolygonDialog.\"\"\" super(RegularPolygonDialog, 
self).__init__() self.ui", "polygon = [A, B] for _ in range(self.sides - 2):", "= Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"id\"] = id_ item.item[\"definition\"] = definition", "id_ = Factory.next_id(item, definition, self.scene.project_data.items) item.item[\"id\"] = id_ item.item[\"definition\"] =", "definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old = self.scene.ui.listWidget.currentRow() fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene,", "self.scene.current_tab_idx) set_selected_id_in_listWidget(self.scene, current_row_old) self.scene.edit.add_undo_item(self.scene) def rejected(self): \"\"\"Add no new regular", "value self.ui.sides_spin.setValue(value) def accepted(self): \"\"\"Create new regular polygon with settings.\"\"\"", "definition, self.scene.project_data.items) item.item[\"definition\"] = definition self.scene.project_data.add(item) self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims) current_row_old = self.scene.ui.listWidget.currentRow()", "fill_listWidget_with_data, set_selected_id_in_listWidget import Constant as c class RegularPolygonDialog(QtWidgets.QDialog): def __init__(self," ]
[ "def test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(),", "self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}]) def", "lon) self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def", "10) self.network.set_RU(20, 20) self.network.add_node('source1', 10, 10) self.network.add_node('destination1', 10, 10) self.network.add_node('destination2',", "with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}, {}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {})", "Location(**{'lat': 5, 'lon': 5})}) lat, lon = 33, 40 self.network.set_RU(lat,", "'5-5': Location(**{'lat': 5, 'lon': 5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon':", "class TestBaseStationLinear(unittest.TestCase): def setUp(self): self.name = \"network\" self.wireless_connection_type = \"LinearDegradation\"", "Location(**{'lat': 5, 'lon': 5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10},", "10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10)", "import unittest from networks.QoS import QoS from networks.connections.mathematical_connections import FunctionalDegradation", "self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': 
Location(**{'lat': 5, 'lon':", "'5.0mbps', 'error_rate': '2.0%'}, radius=\"5km\") self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters)", "self.network.set_RU(lat, lon) self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon)", "test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}, {}, {}) SliceConceptualGraph('test', self.midhaul_qos, {},", "33, 40 self.network.set_RU(lat, lon) self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException):", "name, lat, lon = 'node', 33, 40 lat, lon =", "lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('test', 10, 10) self.assertEqual(self.network.get_node_location('test2'),", "self.assertEqual(self.network.get_nodes(), {}) def test_add_node(self): name, lat, lon = 'node', 33,", "self.name = \"network\" self.wireless_connection_type = \"LinearDegradation\" self.backhaul_qos = {'latency': {'delay':", "with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {},", "SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos, {}) SliceConceptualGraph('test', {},", "33, 40 lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node(name,", "lon = 33, 40 self.network.set_RU(lat, lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10,", "Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) with", "10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) lat, lon = 33,", 
"self.network.add_node('destination2', 20, 20) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat':", "'1.0%'} self.parameters = dict( best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth':", "10, 10) self.network.add_node('destination2', 20, 20) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon':", "{}, {}, self.parameters) def test_get_qos(self): self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict)", "5}]) self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5,", "33, 40 self.network.set_RU(lat, lon) self.network.add_node('test', 10, 10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'),", "\"network\" self.wireless_connection_type = \"Log2Degradation\" self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation':", "= dict( best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps', 'error_rate':", "lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('test', 10, 10)", "= 33, 40 self.network.set_RU(lat, lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon':", "test_add_node(self): name, lat, lon = 'node', 33, 40 with self.assertRaises(SliceConceptualGraph.NetworkSliceException):", "5}]) def test_set_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat, lon)", "10, 'lon': 10}, {'lat': 5, 'lon': 5}]) def test_set_node_location(self): lat,", "{}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos, {}) SliceConceptualGraph('test',", "{'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.backhaul_qos =", "40 self.network.set_RU(lat, lon) self.network.add_node('test', 10, 10) 
self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10,", "'error_rate': '1.0%'} self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth':", "self.network.add_node('source1', 10, 10) self.network.add_node('destination1', 10, 10) self.network.add_node('destination2', 20, 20) def", "def test_get_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('test',", "test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}]) self.assertEqual(self.network.get_RUs(),", "test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {}) def test_set_basetastion(self): lat, lon = 33, 40", "setUp(self): self.name = \"network\" self.wireless_connection_type = \"Log2Degradation\" self.midhaul_qos = {'latency':", "\"Log2Degradation\" self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',", "self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33, 40) def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {}) def test_set_basetastion(self):", "Location(20, 20)) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test', 20) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1',", "{}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos, {})", "self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self):", "def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {}) def test_add_node(self): name, lat, lon =", "self.backhaul_qos, self.parameters) def test_creation(self): 
self.assertEqual(self.network.get_name(), \"network\") def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {})", "self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {} ,{},", "self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}]) self.assertEqual(self.network.get_RUs(), {'10-10':", "from networks.slicing import SliceConceptualGraph from utils.location import Location class TestBaseStationLinear(unittest.TestCase):", "radius=\"5km\") self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters) def test_creation(self): self.assertEqual(self.network.get_name(),", "self.network.set_RU(lat, lon) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with", "= 33, 40 self.network.set_RU(lat, lon) self.network.add_node('destination1', 10, 10) self.network.set_node_location('destination1', 20,", "'100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps', 'error_rate': '2.0%'}, radius=\"5km\") self.network =", "self.network.set_RU(lat, lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}, {}, {})", "= 'node', 33, 40 with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat, lon) self.network.set_RU(33,", "'1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.backhaul_qos = {'latency': {'delay': '3.0ms',", "20)) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test', 20) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 20,", "'error_rate': '1.0%'} self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 
'bandwidth':", "from networks.QoS import QoS from networks.connections.mathematical_connections import FunctionalDegradation from networks.slicing", "= {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'}", "lon) self.network.add_node('test', 10, 10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def", "'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation':", "Location class TestBaseStationLinear(unittest.TestCase): def setUp(self): self.name = \"network\" self.wireless_connection_type =", "def test_add_node(self): name, lat, lon = 'node', 33, 40 lat,", "'20.0ms'}, 'bandwidth': '5.0mbps', 'error_rate': '2.0%'}, radius=\"5km\") self.network = SliceConceptualGraph(self.name, self.midhaul_qos,", "self.network.add_node('test', 10, 10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_set_RUs(self):", "lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33, 40)", "'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) lat, lon =", "def test_get_qos(self): self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def", "{}, self.parameters) def test_get_qos(self): self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(),", "self.network.add_node('test', 10, 10) self.assertEqual(self.network.get_node_location('test2'), None) 
self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_has_to_pass_through_backhaul(self):", "'lon': 5})}) lat, lon = 33, 40 self.network.set_RU(lat, lon) with", "{}) def test_add_node(self): name, lat, lon = 'node', 33, 40", "lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon':", "20) self.network.add_node('source1', 10, 10) self.network.add_node('destination1', 10, 10) self.network.add_node('destination2', 20, 20)", "{}) SliceConceptualGraph('test', {}, self.backhaul_qos, {}) SliceConceptualGraph('test', {}, {}, self.parameters) def", "'100.0mbps', 'error_rate': '1.0%'} self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'},", "lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('destination1', 10, 10) self.network.set_node_location('destination1',", "lon = 'node', 33, 40 with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat, lon)", "10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException):", "10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_set_RUs(self): self.network.set_RUs([{'lat': 10,", "self.assertEqual(self.network.get_RUs(), {}) def test_set_basetastion(self): lat, lon = 33, 40 self.network.set_RU(lat,", "self.network.set_RU(33, 40, 0) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})", "{'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.parameters", "SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters) def test_creation(self): self.assertEqual(self.network.get_name(), \"network\") def test_get_empty_nodes(self):", "SliceConceptualGraph from 
utils.location import Location class TestBaseStationLinear(unittest.TestCase): def setUp(self): self.name", "self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos, {}) SliceConceptualGraph('test', {}, {},", "from utils.location import Location class TestBaseStationLinear(unittest.TestCase): def setUp(self): self.name =", "10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) lat, lon", "'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.parameters = dict( best_qos={'latency': {'delay': '5.0ms',", "'lon': 5}]) self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat':", "test_add_node(self): name, lat, lon = 'node', 33, 40 lat, lon", "lat, lon) self.network.set_RU(33, 40, 0) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node':", "self.network.add_node(name, lat, lon) self.network.set_RU(33, 40, 0) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(),", "= \"network\" self.wireless_connection_type = \"Log2Degradation\" self.midhaul_qos = {'latency': {'delay': '3.0ms',", "self.network.set_RU(lat, lon) self.network.add_node('test', 10, 10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10))", "self.network.set_RU(10, 10) self.network.set_RU(20, 20) self.network.add_node('source1', 10, 10) self.network.add_node('destination1', 10, 10)", "self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10) self.network.set_RU(20, 20) self.network.add_node('source1',", "QoS(self.backhaul_qos)) def test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), 
self.parameters.get('worst_qos'))", "'error_rate': '2.0%'}, radius=\"5km\") self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters) def", "self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}, {}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test',", "40 self.network.set_RU(lat, lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat':", "'bandwidth': '10.0mbps', 'error_rate': '1.0%'}, worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth':", "'1.0%'}, worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps', 'error_rate': '2.0%'},", "worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps', 'error_rate': '2.0%'}, radius=\"5km\")", "Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException):", "self.network.set_node_location('destination1', 'test', 20) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 20, 'test') class TestBaseLog2Degradation(unittest.TestCase):", "lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33, 40) def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {})", "self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}, {},", "test_creation(self): self.assertEqual(self.network.get_name(), \"network\") def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {}) def 
test_add_node(self): name,", "lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {} ,{}, {}) SliceConceptualGraph('test',", "{'lat': 5, 'lon': 5}]) self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat': 10, 'lon': 10}),", "self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10) self.network.set_RU(20,", "lat, lon = 'node', 33, 40 with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat,", "test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {}) def test_add_node(self): name, lat, lon = 'node',", "self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 20, 'test') class TestBaseLog2Degradation(unittest.TestCase): def setUp(self): self.name =", "'5-5': Location(**{'lat': 5, 'lon': 5})}) lat, lon = 33, 40", "self.wireless_connection_type = \"Log2Degradation\" self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'},", "self.network.set_RU(20, 20) self.network.add_node('source1', 10, 10) self.network.add_node('destination1', 10, 10) self.network.add_node('destination2', 20,", "= \"Log2Degradation\" self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth':", "self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon':", "'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat':", "QoS from networks.connections.mathematical_connections import FunctionalDegradation from networks.slicing import SliceConceptualGraph from", "with 
self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {} ,{}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {})", "33, 40 self.network.set_RU(lat, lon) self.network.add_node('destination1', 10, 10) self.network.set_node_location('destination1', 20, 20)", "{} ,{}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos,", "name, lat, lon = 'node', 33, 40 with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name,", "33, 40) def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {}) def test_set_basetastion(self): lat, lon", "def test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10) self.network.set_RU(20, 20) self.network.add_node('source1', 10, 10) self.network.add_node('destination1',", "{}) SliceConceptualGraph('test', {}, {}, self.parameters) def test_get_qos(self): self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def", "self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate':", "= \"network\" self.wireless_connection_type = \"LinearDegradation\" self.backhaul_qos = {'latency': {'delay': '3.0ms',", "10) self.network.add_node('destination1', 10, 10) self.network.add_node('destination2', 20, 20) def test_set_RUs(self): self.network.set_RUs([{'lat':", "lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}, {}, {}) SliceConceptualGraph('test',", "Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) lat,", "networks.QoS import QoS from networks.connections.mathematical_connections import FunctionalDegradation from networks.slicing import", "dict( best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps', 'error_rate': '1.0%'},", "self.network.set_RU(lat, lon) def 
test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {} ,{}, {})", "def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}, {}, {}) SliceConceptualGraph('test', self.midhaul_qos,", "'1.0%'} self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',", "'error_rate': '1.0%'}, worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps', 'error_rate':", "self.network.set_RU(lat, lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5,", "lat, lon = 'node', 33, 40 lat, lon = 33,", "import Location class TestBaseStationLinear(unittest.TestCase): def setUp(self): self.name = \"network\" self.wireless_connection_type", "self.network.add_node('destination1', 10, 10) self.network.set_node_location('destination1', 20, 20) self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20)) with", "'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.parameters = dict( best_qos={'latency':", "lon = 33, 40 self.network.set_RU(lat, lon) self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)})", "def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def test_get_node_location(self): lat, lon", "lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33,", "{}, {}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos,", "33, 40 with 
self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat, lon) self.network.set_RU(33, 40, 0)", "def test_add_node(self): name, lat, lon = 'node', 33, 40 with", "'node', 33, 40 lat, lon = 33, 40 self.network.set_RU(lat, lon)", "lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test',", "{'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.midhaul_qos", "self.wireless_connection_type = \"LinearDegradation\" self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'},", "\"LinearDegradation\" self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',", "def test_set_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('destination1',", "= \"LinearDegradation\" self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth':", "'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation':", "self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat, lon) self.network.set_RU(33, 40, 0) self.network.add_node(name, lat, lon)", "{f'{lat}-{lon}': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self): with", "lat, lon = 33, 40 self.network.set_RU(lat, lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat':", "lon) self.network.add_node('destination1', 10, 10) self.network.set_node_location('destination1', 20, 20) self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20))", "10}, {'lat': 5, 'lon': 5}]) def test_set_node_location(self): lat, 
lon =", "'node', 33, 40 with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat, lon) self.network.set_RU(33, 40,", "'lon': 5}]) def test_set_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat,", "test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {} ,{}, {}) SliceConceptualGraph('test', self.midhaul_qos, {},", "\"network\" self.wireless_connection_type = \"LinearDegradation\" self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation':", "= 33, 40 self.network.set_RU(lat, lon) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node':", "'3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.backhaul_qos = {'latency':", "20) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 20, 'test') class TestBaseLog2Degradation(unittest.TestCase): def setUp(self):", "self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def", "5})}) lat, lon = 33, 40 self.network.set_RU(lat, lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException):", "test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10) self.network.set_RU(20, 20) self.network.add_node('source1', 10, 10) self.network.add_node('destination1', 10,", "with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat, lon) def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {}", 
"self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def test_get_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat,", "33, 40 self.network.set_RU(lat, lon) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat,", "self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node',", "10}), '5-5': Location(**{'lat': 5, 'lon': 5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10,", "None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10) self.network.set_RU(20, 20)", "20, 20) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5,", "self.network.add_node('destination1', 10, 10) self.network.add_node('destination2', 20, 20) def test_set_RUs(self): self.network.set_RUs([{'lat': 10,", "def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {}) def test_set_basetastion(self): lat, lon = 33,", "'lon': 10}, {'lat': 5, 'lon': 5}]) def test_set_node_location(self): lat, lon", "self.network.set_node_location('destination1', 20, 'test') class TestBaseLog2Degradation(unittest.TestCase): def setUp(self): self.name = \"network\"", "'1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.midhaul_qos = {'latency': {'delay': '3.0ms',", "self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {} ,{}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test',", ",{}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos, {})", "lon) self.network.set_RU(33, 40, 0) self.network.add_node(name, lat, lon) 
self.assertEqual(self.network.get_nodes(), {'node': Location(lat,", "setUp(self): self.name = \"network\" self.wireless_connection_type = \"LinearDegradation\" self.backhaul_qos = {'latency':", "self.network.set_RU(lat, lon) self.network.add_node('destination1', 10, 10) self.network.set_node_location('destination1', 20, 20) self.assertEqual(self.network.get_node_location('destination1'), Location(20,", "import SliceConceptualGraph from utils.location import Location class TestBaseStationLinear(unittest.TestCase): def setUp(self):", "self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters) def test_creation(self): self.assertEqual(self.network.get_name(), \"network\")", "with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat, lon) self.network.set_RU(33, 40, 0) self.network.add_node(name, lat,", "lon) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException):", "Location(10, 10)) def test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10) self.network.set_RU(20, 20) self.network.add_node('source1', 10,", "TestBaseStationLinear(unittest.TestCase): def setUp(self): self.name = \"network\" self.wireless_connection_type = \"LinearDegradation\" self.backhaul_qos", "{'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})})", "def test_set_basetastion(self): lat, lon = 33, 40 self.network.set_RU(lat, lon) self.assertEqual(self.network.get_RUs(),", "5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon':", "40 self.network.set_RU(lat, lon) self.network.add_node('destination1', 10, 10) self.network.set_node_location('destination1', 20, 20) self.assertEqual(self.network.get_node_location('destination1'),", "{'delay': '100.0ms', 'deviation': 
'20.0ms'}, 'bandwidth': '5.0mbps', 'error_rate': '2.0%'}, radius=\"5km\") self.network", "10, 10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_set_RUs(self): self.network.set_RUs([{'lat':", "40 lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node(name, lat,", "QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def test_get_node_location(self): lat,", "'1.0%'} self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',", "SliceConceptualGraph('test', {}, self.backhaul_qos, {}) SliceConceptualGraph('test', {}, {}, self.parameters) def test_get_qos(self):", "10) self.network.add_node('destination2', 20, 20) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10},", "'2.0%'}, radius=\"5km\") self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters) def test_creation(self):", "10, 10) self.assertEqual(self.network.get_node_location('test2'), None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_has_to_pass_through_backhaul(self): self.network.set_RU(10,", "with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 20, 'test') class TestBaseLog2Degradation(unittest.TestCase): def setUp(self): self.name", "SliceConceptualGraph('test', {}, {}, self.parameters) def test_get_qos(self): self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def test_set_qos(self):", "{'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.midhaul_qos =", "self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def 
test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(),", "self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def test_get_node_location(self):", "\"network\") def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {}) def test_add_node(self): name, lat, lon", "with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])", "def setUp(self): self.name = \"network\" self.wireless_connection_type = \"Log2Degradation\" self.midhaul_qos =", "'3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.midhaul_qos = {'latency':", "class TestBaseLog2Degradation(unittest.TestCase): def setUp(self): self.name = \"network\" self.wireless_connection_type = \"Log2Degradation\"", "self.parameters) def test_get_qos(self): self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict))", "FunctionalDegradation from networks.slicing import SliceConceptualGraph from utils.location import Location class", "33, 40 self.network.set_RU(lat, lon) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10},", "self.parameters.get('best_qos')) def test_get_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat, lon)", "'1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.parameters = dict( best_qos={'latency': 
{'delay':", "self.network.add_node('node', 33, 40) def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {}) def test_set_basetastion(self): lat,", "'deviation': '2.0ms'}, 'bandwidth': '10.0mbps', 'error_rate': '1.0%'}, worst_qos={'latency': {'delay': '100.0ms', 'deviation':", "= SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters) def test_creation(self): self.assertEqual(self.network.get_name(), \"network\") def", "'10.0mbps', 'error_rate': '1.0%'}, worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps',", "lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node(name, lat, lon)", "lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(),", "best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps', 'error_rate': '1.0%'}, worst_qos={'latency':", "20, 20) self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20)) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test', 20)", "= 'node', 33, 40 lat, lon = 33, 40 self.network.set_RU(lat,", "{}, self.backhaul_qos, {}) SliceConceptualGraph('test', {}, {}, self.parameters) def test_get_qos(self): self.assertEqual(self.network.get_backhaul(),", "self.midhaul_qos, self.backhaul_qos, self.parameters) def test_creation(self): self.assertEqual(self.network.get_name(), \"network\") def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(),", "unittest from networks.QoS import QoS from networks.connections.mathematical_connections import FunctionalDegradation from", "'100.0mbps', 'error_rate': '1.0%'} self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'},", "with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test', 20) with self.assertRaises(Location.LocationException): 
self.network.set_node_location('destination1', 20, 'test')", "lon = 'node', 33, 40 lat, lon = 33, 40", "'3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.parameters = dict(", "networks.connections.mathematical_connections import FunctionalDegradation from networks.slicing import SliceConceptualGraph from utils.location import", "{}, {}) SliceConceptualGraph('test', {}, self.backhaul_qos, {}) SliceConceptualGraph('test', {}, {}, self.parameters)", "self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate':", "def test_constructor(self): with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException): SliceConceptualGraph('test', {} ,{}, {}) SliceConceptualGraph('test', self.midhaul_qos,", "'100.0mbps', 'error_rate': '1.0%'} self.parameters = dict( best_qos={'latency': {'delay': '5.0ms', 'deviation':", "self.backhaul_qos, {}) SliceConceptualGraph('test', {}, {}, self.parameters) def test_get_qos(self): self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos))", "test_get_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('test', 10,", "utils.location import Location class TestBaseStationLinear(unittest.TestCase): def setUp(self): self.name = \"network\"", "5, 'lon': 5}]) self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5':", "SliceConceptualGraph('test', {}, {}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {},", "self.parameters) def test_creation(self): self.assertEqual(self.network.get_name(), \"network\") def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {}) def", "lat, lon = 33, 40 self.network.set_RU(lat, lon) self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat,", "40) def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {}) def test_set_basetastion(self): lat, 
lon =", "test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def test_get_node_location(self): lat, lon =", "10, 'lon': 10}, {'lat': 5, 'lon': 5}]) self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat':", "40 with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node(name, lat, lon) self.network.set_RU(33, 40, 0) self.network.add_node(name,", "20) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon':", "0) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException):", "{'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps', 'error_rate': '1.0%'}, worst_qos={'latency': {'delay':", "{'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.backhaul_qos", "10, 10) self.network.set_node_location('destination1', 20, 20) self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20)) with self.assertRaises(Location.LocationException):", "{'node': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33, 40) def test_get_empty_RUs(self):", "test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self): self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos'))", "'lon': 10}, {'lat': 5, 'lon': 5}]) self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat': 10,", "test_get_qos(self): 
self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos)) def test_set_qos(self): self.network.set_backhaul(QoS.minimum_qos_dict) self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict)) def test_qos_from_distance(self):", "self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20)) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test', 20) with self.assertRaises(Location.LocationException):", "def setUp(self): self.name = \"network\" self.wireless_connection_type = \"LinearDegradation\" self.backhaul_qos =", "5, 'lon': 5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat':", "{'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.parameters =", "lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('destination1', 10, 10)", "'bandwidth': '5.0mbps', 'error_rate': '2.0%'}, radius=\"5km\") self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos,", "self.assertEqual(self.network.get_name(), \"network\") def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {}) def test_add_node(self): name, lat,", "from networks.connections.mathematical_connections import FunctionalDegradation from networks.slicing import SliceConceptualGraph from utils.location", "Location(10, 10)) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5,", "def test_creation(self): self.assertEqual(self.network.get_name(), \"network\") def test_get_empty_nodes(self): self.assertEqual(self.network.get_nodes(), {}) def test_add_node(self):", "with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33, 40) def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(), {}) def", "Location(lat, lon)}) with 
self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33, 40) def test_get_empty_RUs(self): self.assertEqual(self.network.get_RUs(),", "'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.midhaul_qos = {'latency': {'delay':", "5, 'lon': 5}]) def test_set_node_location(self): lat, lon = 33, 40", "'error_rate': '1.0%'} self.parameters = dict( best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'},", "import QoS from networks.connections.mathematical_connections import FunctionalDegradation from networks.slicing import SliceConceptualGraph", "self.parameters = dict( best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps',", "40 self.network.set_RU(lat, lon) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})", "10) self.network.set_node_location('destination1', 20, 20) self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20)) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1',", "self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test', 20) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 20, 'test') class", "{'lat': 5, 'lon': 5}]) def test_set_node_location(self): lat, lon = 33,", "'test', 20) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 20, 'test') class TestBaseLog2Degradation(unittest.TestCase): def", "10)) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon':", "self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.add_node('node', 33, 40) def", "{}) def test_set_basetastion(self): lat, lon = 33, 40 self.network.set_RU(lat, lon)", "= 33, 40 
self.network.set_RU(lat, lon) self.network.add_node('test', 10, 10) self.assertEqual(self.network.get_node_location('test2'), None)", "def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])", "test_set_node_location(self): lat, lon = 33, 40 self.network.set_RU(lat, lon) self.network.add_node('destination1', 10,", "10, 10) self.network.add_node('destination1', 10, 10) self.network.add_node('destination2', 20, 20) def test_set_RUs(self):", "'5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps', 'error_rate': '1.0%'}, worst_qos={'latency': {'delay': '100.0ms',", "import FunctionalDegradation from networks.slicing import SliceConceptualGraph from utils.location import Location", "TestBaseLog2Degradation(unittest.TestCase): def setUp(self): self.name = \"network\" self.wireless_connection_type = \"Log2Degradation\" self.midhaul_qos", "'deviation': '1.0ms'}, 'bandwidth': '100.0mbps', 'error_rate': '1.0%'} self.backhaul_qos = {'latency': {'delay':", "40, 0) self.network.add_node(name, lat, lon) self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)}) with", "SliceConceptualGraph('test', {} ,{}, {}) SliceConceptualGraph('test', self.midhaul_qos, {}, {}) SliceConceptualGraph('test', {},", "40 self.network.set_RU(lat, lon) self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RU(lat,", "20) self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20)) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test', 20) with", "self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}]) def test_set_node_location(self):", "'lon': 5})}) with self.assertRaises(SliceConceptualGraph.NetworkSliceException): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5,", "20, 'test') class TestBaseLog2Degradation(unittest.TestCase): def setUp(self): 
self.name = \"network\" self.wireless_connection_type", "10)) def test_has_to_pass_through_backhaul(self): self.network.set_RU(10, 10) self.network.set_RU(20, 20) self.network.add_node('source1', 10, 10)", "'test') class TestBaseLog2Degradation(unittest.TestCase): def setUp(self): self.name = \"network\" self.wireless_connection_type =", "self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def test_get_node_location(self): lat, lon = 33,", "'2.0ms'}, 'bandwidth': '10.0mbps', 'error_rate': '1.0%'}, worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'},", "self.network.set_node_location('destination1', 20, 20) self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20)) with self.assertRaises(Location.LocationException): self.network.set_node_location('destination1', 'test',", "5, 'lon': 5})}) lat, lon = 33, 40 self.network.set_RU(lat, lon)", "= 33, 40 self.network.set_RU(lat, lon) self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)}) with", "self.parameters.get('worst_qos')) self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos')) def test_get_node_location(self): lat, lon = 33, 40", "None) self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10},", "self.assertEqual(self.network.get_node_location('test'), Location(10, 10)) def test_set_RUs(self): self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat':", "10}, {'lat': 5, 'lon': 5}]) self.assertEqual(self.network.get_RUs(), {'10-10': Location(**{'lat': 10, 'lon':", "self.name = \"network\" self.wireless_connection_type = \"Log2Degradation\" self.midhaul_qos = {'latency': {'delay':", "test_set_basetastion(self): lat, lon = 33, 40 self.network.set_RU(lat, lon) 
self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}':", "'deviation': '20.0ms'}, 'bandwidth': '5.0mbps', 'error_rate': '2.0%'}, radius=\"5km\") self.network = SliceConceptualGraph(self.name,", "networks.slicing import SliceConceptualGraph from utils.location import Location class TestBaseStationLinear(unittest.TestCase): def" ]
[ "----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1)", "models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('---- Creating Job Templates ----')", "models.JobTemplate.objects.create(name='Ping Device', description='Pings a chosen network device and reports if", "int br', file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping Device', description='Pings a chosen network", "a chosen network device and reports if reachable', file_name='ping.py', variables=['target'],", "description='This prints a hello world', file_name='hello_world.py', created_by_id=1) models.JobTemplate.objects.create(name='Get CDP Neighbors',", "of INS lab', created_by_id=2, template_id=2, inventory_id=2) models.Task.objects.create(name='Get interfaces of INS", "for tests \"\"\" from django.contrib.auth.hashers import make_password from django.contrib.auth.models import", "type=1) models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('---- Creating Job Templates", "Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support = Group.objects.get(name='support') support.user_set.add(stefan) print('---- Creating Inventory ----')", "file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping Device', description='Pings a chosen network device and", "if reachable', file_name='ping.py', variables=['target'], created_by_id=1) 
models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration", "password=make_password('<PASSWORD>')) norbert = User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan = User.objects.get(username='stefan') superuser", "hello world', file_name='hello_world.py', created_by_id=1) models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists all CDP", "norbert = User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan = User.objects.get(username='stefan') superuser =", "with example data for tests \"\"\" from django.contrib.auth.hashers import make_password", "created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces', description='Gets brief information about all interfaces, sh", "models.Task.objects.create(name='Get CDP neighbors of INS lab', created_by_id=2, template_id=2, inventory_id=2) models.Task.objects.create(name='Get", "print('---- Creating Tasks ----') models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1)", "information about all interfaces, sh ip int br', file_name='get_interfaces.py', created_by_id=1)", "interfaces, sh ip int br', file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping Device', description='Pings", "file_name='hello_world.py', created_by_id=1) models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists all CDP neighbors', file_name='get_cdp_neighbors.py',", "tests' def handle(self, *args, **options): print('---- Creating Users ----') User.objects.get_or_create(username='thomastest',", "description='Gets brief information about all interfaces, sh ip int br',", "Creating Inventory ----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', 
groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml',", "tests \"\"\" from django.contrib.auth.hashers import make_password from django.contrib.auth.models import User,", "from django.contrib.auth.models import User, Group from django.core.management.base import BaseCommand from", "sh ip int br', file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping Device', description='Pings a", "User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan = User.objects.get(username='stefan') superuser = Group.objects.get(name='superuser') superuser.user_set.add(thomas)", "DB with example data for tests \"\"\" from django.contrib.auth.hashers import", "Hello World', created_by_id=1, template_id=1, inventory_id=1) models.Task.objects.create(name='Get CDP neighbors of INS", "Neighbors', description='Lists all CDP neighbors', file_name='get_cdp_neighbors.py', created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces', description='Gets", "neighbors of INS lab', created_by_id=2, template_id=2, inventory_id=2) models.Task.objects.create(name='Get interfaces of", "class Command(BaseCommand): help = 'Setup DB with example data for", "= Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin = Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support = Group.objects.get(name='support')", "device', file_name='get_configuration.py', created_by_id=1) print('---- Creating Tasks ----') models.Task.objects.create(name='Get Hello World',", "= User.objects.get(username='stefan') superuser = Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin = Group.objects.get(name='netadmin') netadmin.user_set.add(norbert)", "data for tests' def handle(self, *args, **options): print('---- Creating 
Users", "brief information about all interfaces, sh ip int br', file_name='get_interfaces.py',", "BaseCommand from api import models class Command(BaseCommand): help = 'Setup", "hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('---- Creating", "groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('---- Creating Job Templates ----') models.JobTemplate.objects.create(name='hello_world', description='This prints", "CDP neighbors', file_name='get_cdp_neighbors.py', created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces', description='Gets brief information about", "Setup DB with example data for tests \"\"\" from django.contrib.auth.hashers", "about all interfaces, sh ip int br', file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping", "= 'Setup DB with example data for tests' def handle(self,", "models.JobTemplate.objects.create(name='Get Interfaces', description='Gets brief information about all interfaces, sh ip", "Device', description='Pings a chosen network device and reports if reachable',", "django.contrib.auth.models import User, Group from django.core.management.base import BaseCommand from api", "*args, **options): print('---- Creating Users ----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas =", "support.user_set.add(stefan) print('---- Creating Inventory ----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS", "prints a hello world', file_name='hello_world.py', 
created_by_id=1) models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists", "import BaseCommand from api import models class Command(BaseCommand): help =", "Interfaces', description='Gets brief information about all interfaces, sh ip int", "inventory_id=2) models.Task.objects.create(name='Get interfaces of INS lab', created_by_id=2, template_id=3, inventory_id=2) print('----", "device and reports if reachable', file_name='ping.py', variables=['target'], created_by_id=1) models.JobTemplate.objects.create(name='Get Configuration',", "Group from django.core.management.base import BaseCommand from api import models class", "django.contrib.auth.hashers import make_password from django.contrib.auth.models import User, Group from django.core.management.base", "import User, Group from django.core.management.base import BaseCommand from api import", "data for tests \"\"\" from django.contrib.auth.hashers import make_password from django.contrib.auth.models", "from api import models class Command(BaseCommand): help = 'Setup DB", "file_name='ping.py', variables=['target'], created_by_id=1) models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration from device',", "User.objects.get(username='stefan') superuser = Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin = Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support", "all configuration from device', file_name='get_configuration.py', created_by_id=1) print('---- Creating Tasks ----')", "Configuration', description='Gets all configuration from device', file_name='get_configuration.py', created_by_id=1) print('---- Creating", "chosen network device and reports if reachable', file_name='ping.py', variables=['target'], created_by_id=1)", "interfaces of INS lab', created_by_id=2, template_id=3, inventory_id=2) print('---- ALL DONE!!", "network device and reports if reachable', file_name='ping.py', variables=['target'], 
created_by_id=1) models.JobTemplate.objects.create(name='Get", "Command(BaseCommand): help = 'Setup DB with example data for tests'", "netadmin = Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support = Group.objects.get(name='support') support.user_set.add(stefan) print('---- Creating", "Creating Users ----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas = User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>'))", "= Group.objects.get(name='support') support.user_set.add(stefan) print('---- Creating Inventory ----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml',", "Group.objects.get(name='support') support.user_set.add(stefan) print('---- Creating Inventory ----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1)", "\"\"\" from django.contrib.auth.hashers import make_password from django.contrib.auth.models import User, Group", "import make_password from django.contrib.auth.models import User, Group from django.core.management.base import", "----') models.JobTemplate.objects.create(name='hello_world', description='This prints a hello world', file_name='hello_world.py', created_by_id=1) models.JobTemplate.objects.create(name='Get", "= User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>')) norbert = User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan", "models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists all CDP neighbors', file_name='get_cdp_neighbors.py', created_by_id=1) 
models.JobTemplate.objects.create(name='Get", "description='Gets all configuration from device', file_name='get_configuration.py', created_by_id=1) print('---- Creating Tasks", "created_by_id=1) models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration from device', file_name='get_configuration.py', created_by_id=1)", "and reports if reachable', file_name='ping.py', variables=['target'], created_by_id=1) models.JobTemplate.objects.create(name='Get Configuration', description='Gets", "CDP Neighbors', description='Lists all CDP neighbors', file_name='get_cdp_neighbors.py', created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces',", "groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('---- Creating Job", "Users ----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas = User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>')) norbert", "print('---- Creating Inventory ----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS Lab',", "\"\"\" Setup DB with example data for tests \"\"\" from", "models class Command(BaseCommand): help = 'Setup DB with example data", "def handle(self, *args, **options): print('---- Creating Users ----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>'))", "all interfaces, sh ip int br', file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping Device',", "inventory_id=1) models.Task.objects.create(name='Get CDP 
neighbors of INS lab', created_by_id=2, template_id=2, inventory_id=2)", "lab', created_by_id=2, template_id=2, inventory_id=2) models.Task.objects.create(name='Get interfaces of INS lab', created_by_id=2,", "from django.contrib.auth.hashers import make_password from django.contrib.auth.models import User, Group from", "= User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan = User.objects.get(username='stefan') superuser = Group.objects.get(name='superuser')", "file_name='get_configuration.py', created_by_id=1) print('---- Creating Tasks ----') models.Task.objects.create(name='Get Hello World', created_by_id=1,", "User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>')) norbert = User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan =", "CDP neighbors of INS lab', created_by_id=2, template_id=2, inventory_id=2) models.Task.objects.create(name='Get interfaces", "reachable', file_name='ping.py', variables=['target'], created_by_id=1) models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration from", "created_by_id=1) models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists all CDP neighbors', file_name='get_cdp_neighbors.py', created_by_id=1)", "----') models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1) models.Task.objects.create(name='Get CDP neighbors", "Creating Tasks ----') models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1) models.Task.objects.create(name='Get", "reports if reachable', file_name='ping.py', variables=['target'], created_by_id=1) models.JobTemplate.objects.create(name='Get Configuration', description='Gets all", "configuration from device', file_name='get_configuration.py', 
created_by_id=1) print('---- Creating Tasks ----') models.Task.objects.create(name='Get", "models.JobTemplate.objects.create(name='hello_world', description='This prints a hello world', file_name='hello_world.py', created_by_id=1) models.JobTemplate.objects.create(name='Get CDP", "example data for tests \"\"\" from django.contrib.auth.hashers import make_password from", "'Setup DB with example data for tests' def handle(self, *args,", "models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('----", "print('---- Creating Users ----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas = User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert',", "World', created_by_id=1, template_id=1, inventory_id=1) models.Task.objects.create(name='Get CDP neighbors of INS lab',", "variables=['target'], created_by_id=1) models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration from device', file_name='get_configuration.py',", "django.core.management.base import BaseCommand from api import models class Command(BaseCommand): help", "help = 'Setup DB with example data for tests' def", "models.Task.objects.create(name='Get interfaces of INS lab', created_by_id=2, template_id=3, inventory_id=2) print('---- ALL", "type=1) print('---- Creating Job Templates ----') models.JobTemplate.objects.create(name='hello_world', description='This prints a", "INS lab', created_by_id=2, template_id=2, inventory_id=2) models.Task.objects.create(name='Get interfaces of INS lab',", "world', file_name='hello_world.py', created_by_id=1) models.JobTemplate.objects.create(name='Get 
CDP Neighbors', description='Lists all CDP neighbors',", "password=make_password('<PASSWORD>')) thomas = User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>')) norbert = User.objects.get(username='norbert') User.objects.get_or_create(username='stefan',", "neighbors', file_name='get_cdp_neighbors.py', created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces', description='Gets brief information about all", "created_by_id=1, template_id=1, inventory_id=1) models.Task.objects.create(name='Get CDP neighbors of INS lab', created_by_id=2,", "----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas = User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>')) norbert =", "import models class Command(BaseCommand): help = 'Setup DB with example", "ip int br', file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping Device', description='Pings a chosen", "User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas = User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>')) norbert = User.objects.get(username='norbert')", "= Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support = Group.objects.get(name='support') support.user_set.add(stefan) print('---- Creating Inventory", "Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('---- Creating Job Templates ----') models.JobTemplate.objects.create(name='hello_world',", "Templates ----') models.JobTemplate.objects.create(name='hello_world', description='This prints a hello world', file_name='hello_world.py', created_by_id=1)", "Creating Job Templates ----') 
models.JobTemplate.objects.create(name='hello_world', description='This prints a hello world',", "for tests' def handle(self, *args, **options): print('---- Creating Users ----')", "Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin = Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support = Group.objects.get(name='support') support.user_set.add(stefan)", "handle(self, *args, **options): print('---- Creating Users ----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas", "br', file_name='get_interfaces.py', created_by_id=1) models.JobTemplate.objects.create(name='Ping Device', description='Pings a chosen network device", "template_id=1, inventory_id=1) models.Task.objects.create(name='Get CDP neighbors of INS lab', created_by_id=2, template_id=2,", "superuser = Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin = Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support =", "description='Lists all CDP neighbors', file_name='get_cdp_neighbors.py', created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces', description='Gets brief", "superuser.user_set.add(thomas) netadmin = Group.objects.get(name='netadmin') netadmin.user_set.add(norbert) support = Group.objects.get(name='support') support.user_set.add(stefan) print('----", "created_by_id=2, template_id=2, inventory_id=2) models.Task.objects.create(name='Get interfaces of INS lab', created_by_id=2, template_id=3,", "created_by_id=1) models.JobTemplate.objects.create(name='Ping Device', description='Pings a chosen network device and reports", "template_id=2, inventory_id=2) models.Task.objects.create(name='Get interfaces of INS lab', created_by_id=2, template_id=3, inventory_id=2)", "created_by_id=1) print('---- Creating Tasks ----') models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1,", "password=make_password('<PASSWORD>')) stefan = 
User.objects.get(username='stefan') superuser = Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin =", "hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1) print('---- Creating Job Templates ----') models.JobTemplate.objects.create(name='hello_world', description='This", "a hello world', file_name='hello_world.py', created_by_id=1) models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists all", "of INS lab', created_by_id=2, template_id=3, inventory_id=2) print('---- ALL DONE!! ----')", "**options): print('---- Creating Users ----') User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>')) thomas = User.objects.get(username='thomastest')", "example data for tests' def handle(self, *args, **options): print('---- Creating", "DB with example data for tests' def handle(self, *args, **options):", "models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration from device', file_name='get_configuration.py', created_by_id=1) print('----", "from django.core.management.base import BaseCommand from api import models class Command(BaseCommand):", "User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>')) norbert = User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan = User.objects.get(username='stefan')", "from device', file_name='get_configuration.py', created_by_id=1) print('---- Creating Tasks ----') models.Task.objects.create(name='Get Hello", "User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>')) stefan = User.objects.get(username='stefan') superuser = Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin", "make_password from django.contrib.auth.models import User, Group from django.core.management.base import BaseCommand", 
"models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1) models.Task.objects.create(name='Get CDP neighbors of", "all CDP neighbors', file_name='get_cdp_neighbors.py', created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces', description='Gets brief information", "description='Pings a chosen network device and reports if reachable', file_name='ping.py',", "User, Group from django.core.management.base import BaseCommand from api import models", "with example data for tests' def handle(self, *args, **options): print('----", "file_name='get_cdp_neighbors.py', created_by_id=1) models.JobTemplate.objects.create(name='Get Interfaces', description='Gets brief information about all interfaces,", "print('---- Creating Job Templates ----') models.JobTemplate.objects.create(name='hello_world', description='This prints a hello", "support = Group.objects.get(name='support') support.user_set.add(stefan) print('---- Creating Inventory ----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml',", "stefan = User.objects.get(username='stefan') superuser = Group.objects.get(name='superuser') superuser.user_set.add(thomas) netadmin = Group.objects.get(name='netadmin')", "Inventory ----') models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml', groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1) models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file='web_nornir/nornir_config/inslab_config/groups.yaml',", "netadmin.user_set.add(norbert) support = Group.objects.get(name='support') support.user_set.add(stefan) print('---- Creating Inventory ----') models.Inventory.objects.create(name='Example',", "thomas = User.objects.get(username='thomastest') User.objects.get_or_create(username='norbert', 
password=make_password('<PASSWORD>')) norbert = User.objects.get(username='norbert') User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>'))", "Tasks ----') models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1) models.Task.objects.create(name='Get CDP", "api import models class Command(BaseCommand): help = 'Setup DB with", "Job Templates ----') models.JobTemplate.objects.create(name='hello_world', description='This prints a hello world', file_name='hello_world.py'," ]
[ "distro def get_distro_info(root_dir): # We point _UNIXCONFDIR to root_dir old_value", "LinuxDistribution distro information is done in a lazy way. #", "before we restore the old value of _UNIXCONFDIR. _ =", "the old value of _UNIXCONFDIR. _ = obj.info() distro._UNIXCONFDIR =", "root_dir old_value = distro._UNIXCONFDIR distro._UNIXCONFDIR = os.path.join(root_dir, 'etc') obj =", "of _UNIXCONFDIR. _ = obj.info() distro._UNIXCONFDIR = old_value return obj", "'etc') obj = distro.LinuxDistribution(include_lsb=False, include_uname=False) # NOTE: The parsing of", "get_distro_info(root_dir): # We point _UNIXCONFDIR to root_dir old_value = distro._UNIXCONFDIR", "happen before we restore the old value of _UNIXCONFDIR. _", "in a lazy way. # This will force the parsing", "from __future__ import absolute_import, unicode_literals import os import distro def", "import distro def get_distro_info(root_dir): # We point _UNIXCONFDIR to root_dir", "= distro._UNIXCONFDIR distro._UNIXCONFDIR = os.path.join(root_dir, 'etc') obj = distro.LinuxDistribution(include_lsb=False, include_uname=False)", "parsing of LinuxDistribution distro information is done in a lazy", "= distro.LinuxDistribution(include_lsb=False, include_uname=False) # NOTE: The parsing of LinuxDistribution distro", "to root_dir old_value = distro._UNIXCONFDIR distro._UNIXCONFDIR = os.path.join(root_dir, 'etc') obj", "a lazy way. # This will force the parsing to", "restore the old value of _UNIXCONFDIR. _ = obj.info() distro._UNIXCONFDIR", "of LinuxDistribution distro information is done in a lazy way.", "absolute_import, unicode_literals import os import distro def get_distro_info(root_dir): # We", "to happen before we restore the old value of _UNIXCONFDIR.", "way. 
# This will force the parsing to happen before", "_UNIXCONFDIR to root_dir old_value = distro._UNIXCONFDIR distro._UNIXCONFDIR = os.path.join(root_dir, 'etc')", "import os import distro def get_distro_info(root_dir): # We point _UNIXCONFDIR", "os.path.join(root_dir, 'etc') obj = distro.LinuxDistribution(include_lsb=False, include_uname=False) # NOTE: The parsing", "import absolute_import, unicode_literals import os import distro def get_distro_info(root_dir): #", "# We point _UNIXCONFDIR to root_dir old_value = distro._UNIXCONFDIR distro._UNIXCONFDIR", "NOTE: The parsing of LinuxDistribution distro information is done in", "force the parsing to happen before we restore the old", "distro._UNIXCONFDIR distro._UNIXCONFDIR = os.path.join(root_dir, 'etc') obj = distro.LinuxDistribution(include_lsb=False, include_uname=False) #", "parsing to happen before we restore the old value of", "obj = distro.LinuxDistribution(include_lsb=False, include_uname=False) # NOTE: The parsing of LinuxDistribution", "os import distro def get_distro_info(root_dir): # We point _UNIXCONFDIR to", "The parsing of LinuxDistribution distro information is done in a", "the parsing to happen before we restore the old value", "include_uname=False) # NOTE: The parsing of LinuxDistribution distro information is", "distro._UNIXCONFDIR = os.path.join(root_dir, 'etc') obj = distro.LinuxDistribution(include_lsb=False, include_uname=False) # NOTE:", "def get_distro_info(root_dir): # We point _UNIXCONFDIR to root_dir old_value =", "# NOTE: The parsing of LinuxDistribution distro information is done", "will force the parsing to happen before we restore the", "point _UNIXCONFDIR to root_dir old_value = distro._UNIXCONFDIR distro._UNIXCONFDIR = os.path.join(root_dir,", "# This will force the parsing to happen before we", "we restore the old value of _UNIXCONFDIR. 
_ = obj.info()", "= os.path.join(root_dir, 'etc') obj = distro.LinuxDistribution(include_lsb=False, include_uname=False) # NOTE: The", "unicode_literals import os import distro def get_distro_info(root_dir): # We point", "lazy way. # This will force the parsing to happen", "This will force the parsing to happen before we restore", "distro.LinuxDistribution(include_lsb=False, include_uname=False) # NOTE: The parsing of LinuxDistribution distro information", "information is done in a lazy way. # This will", "old value of _UNIXCONFDIR. _ = obj.info() distro._UNIXCONFDIR = old_value", "is done in a lazy way. # This will force", "We point _UNIXCONFDIR to root_dir old_value = distro._UNIXCONFDIR distro._UNIXCONFDIR =", "done in a lazy way. # This will force the", "distro information is done in a lazy way. # This", "value of _UNIXCONFDIR. _ = obj.info() distro._UNIXCONFDIR = old_value return", "old_value = distro._UNIXCONFDIR distro._UNIXCONFDIR = os.path.join(root_dir, 'etc') obj = distro.LinuxDistribution(include_lsb=False,", "__future__ import absolute_import, unicode_literals import os import distro def get_distro_info(root_dir):" ]
[ "2.0 (the \"License\"); # you may not use this file", "\"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). # These do not quite work for", "[] return self @property def json_wire_gestures(self) -> Dict[str, Union[List, str]]:", "self._touch_actions.append(copy.copy(touch_action)) def perform(self: T) -> T: \"\"\"Perform the actions stored", "self._driver = driver self._element = element self._touch_actions: List['TouchAction'] = []", "*touch_actions: 'TouchAction') -> None: \"\"\"Add TouchAction objects to the MultiAction,", "team implemented something like the Multi Action API in the", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self._touch_actions = [] self._touch_actions.append(copy.copy(touch_action)) def perform(self: T) -> T: \"\"\"Perform", "import TYPE_CHECKING, Dict, List, Optional, TypeVar, Union from appium.webdriver.mobilecommand import", "next batch self._touch_actions = [] return self @property def json_wire_gestures(self)", "instance \"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) # clean up and be ready", "| a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2) Returns: `MultiAction`: Self instance \"\"\"", "more TouchAction objects describing a chain of actions to be", "spec requires. import copy from typing import TYPE_CHECKING, Dict, List,", "be ready for the next batch self._touch_actions = [] return", "self.json_wire_gestures) # clean up and be ready for the next", "limitations under the License. # The Selenium team implemented something", "use this file except in compliance with the License. #", "if TYPE_CHECKING: from appium.webdriver.common.touch_action import TouchAction from appium.webdriver.webdriver import WebDriver", "-> None: self._driver = driver self._element = element self._touch_actions: List['TouchAction']", "performed later. 
Args: touch_actions: one or more TouchAction objects describing", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"\"\"Perform the actions stored in the object. Usage: | a1", "License. # You may obtain a copy of the License", "the object. Usage: | a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release() |", "str]]: actions = [] for action in self._touch_actions: actions.append(action.json_wire_gestures) if", "under the License is distributed on an \"AS IS\" BASIS,", "in the form of # \"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). # These", "actions to be performed by one finger Usage: | a1", "self._element is not None: return {'actions': actions, 'elementId': self._element.id} return", "License for the specific language governing permissions and # limitations", "chain of actions to be performed by one finger Usage:", "Command if TYPE_CHECKING: from appium.webdriver.common.touch_action import TouchAction from appium.webdriver.webdriver import", "# chaining as the spec requires. import copy from typing", "in the object. Usage: | a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release()", "a2).perform() Returns: `MultiAction`: Self instance \"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) # clean", "a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2) Returns: `MultiAction`: Self instance \"\"\" for", "= TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2).perform() Returns: `MultiAction`: Self", "or more TouchAction objects describing a chain of actions to", "-> None: \"\"\"Add TouchAction objects to the MultiAction, to be", "performed by one finger Usage: | a1 = TouchAction(driver) |", "License. # The Selenium team implemented something like the Multi", "in compliance with the License. 
# You may obtain a", "software # distributed under the License is distributed on an", "implemented something like the Multi Action API in the form", "of actions to be performed by one finger Usage: |", "be performed later. Args: touch_actions: one or more TouchAction objects", "self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) # clean up and be ready for the", "TouchAction(driver) | a1.press(el1).move_to(el2).release() | a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() |", "to be performed by one finger Usage: | a1 =", "requires. import copy from typing import TYPE_CHECKING, Dict, List, Optional,", "TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2).perform() Returns: `MultiAction`: Self instance", "ad hoc action # chaining as the spec requires. import", "objects to the MultiAction, to be performed later. Args: touch_actions:", "actions.append(action.json_wire_gestures) if self._element is not None: return {'actions': actions, 'elementId':", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) # clean up and be ready for", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "under the License. # The Selenium team implemented something like", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "to in writing, software # distributed under the License is", "# See the License for the specific language governing permissions", "language governing permissions and # limitations under the License. #", "of # \"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). 
# These do not quite", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "be performed by one finger Usage: | a1 = TouchAction(driver)", "# The Selenium team implemented something like the Multi Action", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "The Selenium team implemented something like the Multi Action API", "json_wire_gestures(self) -> Dict[str, Union[List, str]]: actions = [] for action", "with the License. # You may obtain a copy of", "def json_wire_gestures(self) -> Dict[str, Union[List, str]]: actions = [] for", "Dict[str, Union[List, str]]: actions = [] for action in self._touch_actions:", "one or more TouchAction objects describing a chain of actions", "the actions stored in the object. Usage: | a1 =", "a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2) Returns: `MultiAction`:", "compliance with the License. # You may obtain a copy", "Usage: | a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release() | a2 =", "agreed to in writing, software # distributed under the License", "Dict, List, Optional, TypeVar, Union from appium.webdriver.mobilecommand import MobileCommand as", "distributed under the License is distributed on an \"AS IS\"", "TouchAction objects describing a chain of actions to be performed", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "not allow for ad hoc action # chaining as the", "do not quite work for this situation, and do not", "(https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). 
# These do not quite work for this situation,", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "something like the Multi Action API in the form of", "def perform(self: T) -> T: \"\"\"Perform the actions stored in", "writing, software # distributed under the License is distributed on", "T = TypeVar('T', bound='MultiAction') class MultiAction: def __init__(self, driver: 'WebDriver',", "None: self._touch_actions = [] self._touch_actions.append(copy.copy(touch_action)) def perform(self: T) -> T:", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "'TouchAction') -> None: \"\"\"Add TouchAction objects to the MultiAction, to", "TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2) Returns: `MultiAction`: Self instance", "the License. # The Selenium team implemented something like the", "up and be ready for the next batch self._touch_actions =", "a2) Returns: `MultiAction`: Self instance \"\"\" for touch_action in touch_actions:", "form of # \"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). # These do not", "action in self._touch_actions: actions.append(action.json_wire_gestures) if self._element is not None: return", "stored in the object. Usage: | a1 = TouchAction(driver) |", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "= [] return self @property def json_wire_gestures(self) -> Dict[str, Union[List,", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "driver self._element = element self._touch_actions: List['TouchAction'] = [] def add(self,", "ready for the next batch self._touch_actions = [] return self", "MultiAction: def __init__(self, driver: 'WebDriver', element: Optional['WebElement'] = None) ->", "a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2).perform() Returns: `MultiAction`: Self instance \"\"\" self._driver.execute(Command.MULTI_ACTION,", "touch_actions: if self._touch_actions is None: self._touch_actions = [] self._touch_actions.append(copy.copy(touch_action)) def", "= [] self._touch_actions.append(copy.copy(touch_action)) def perform(self: T) -> T: \"\"\"Perform the", "Self instance \"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) # clean up and be", "OR CONDITIONS OF ANY KIND, either express or implied. #", "= driver self._element = element self._touch_actions: List['TouchAction'] = [] def", "These do not quite work for this situation, and do", "the License is distributed on an \"AS IS\" BASIS, #", "is not None: return {'actions': actions, 'elementId': self._element.id} return {'actions':", "MultiAction(driver).add(a1, a2).perform() Returns: `MultiAction`: Self instance \"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) #", "Action API in the form of # \"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py).", "TypeVar('T', bound='MultiAction') class MultiAction: def __init__(self, driver: 'WebDriver', element: Optional['WebElement']", "def add(self, *touch_actions: 'TouchAction') -> None: \"\"\"Add TouchAction objects to", "import MobileCommand as Command if TYPE_CHECKING: from appium.webdriver.common.touch_action import TouchAction", "__init__(self, driver: 'WebDriver', element: Optional['WebElement'] = None) -> 
None: self._driver", "a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2).perform() Returns: `MultiAction`:", "by one finger Usage: | a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release()", "`MultiAction`: Self instance \"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) # clean up and", "and # limitations under the License. # The Selenium team", "to be performed later. Args: touch_actions: one or more TouchAction", "law or agreed to in writing, software # distributed under", "Union from appium.webdriver.mobilecommand import MobileCommand as Command if TYPE_CHECKING: from", "None: self._driver = driver self._element = element self._touch_actions: List['TouchAction'] =", "Self instance \"\"\" for touch_action in touch_actions: if self._touch_actions is", "Returns: `MultiAction`: Self instance \"\"\" for touch_action in touch_actions: if", "copy from typing import TYPE_CHECKING, Dict, List, Optional, TypeVar, Union", "self._touch_actions is None: self._touch_actions = [] self._touch_actions.append(copy.copy(touch_action)) def perform(self: T)", "[] def add(self, *touch_actions: 'TouchAction') -> None: \"\"\"Add TouchAction objects", "not quite work for this situation, and do not allow", "TYPE_CHECKING, Dict, List, Optional, TypeVar, Union from appium.webdriver.mobilecommand import MobileCommand", "TYPE_CHECKING: from appium.webdriver.common.touch_action import TouchAction from appium.webdriver.webdriver import WebDriver from", "# limitations under the License. # The Selenium team implemented", "MultiAction, to be performed later. 
Args: touch_actions: one or more", "is None: self._touch_actions = [] self._touch_actions.append(copy.copy(touch_action)) def perform(self: T) ->", "may obtain a copy of the License at # #", "-> Dict[str, Union[List, str]]: actions = [] for action in", "self._element = element self._touch_actions: List['TouchAction'] = [] def add(self, *touch_actions:", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "batch self._touch_actions = [] return self @property def json_wire_gestures(self) ->", "governing permissions and # limitations under the License. # The", "may not use this file except in compliance with the", "instance \"\"\" for touch_action in touch_actions: if self._touch_actions is None:", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "this file except in compliance with the License. # You", "Multi Action API in the form of # \"action chains\"", "-> T: \"\"\"Perform the actions stored in the object. Usage:", "as Command if TYPE_CHECKING: from appium.webdriver.common.touch_action import TouchAction from appium.webdriver.webdriver", "= [] for action in self._touch_actions: actions.append(action.json_wire_gestures) if self._element is", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# \"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). # These do not quite work", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "from appium.webdriver.webdriver import WebDriver from appium.webdriver.webelement import WebElement T =", "python # Licensed under the Apache License, Version 2.0 (the", "to the MultiAction, to be performed later. 
Args: touch_actions: one", "MobileCommand as Command if TYPE_CHECKING: from appium.webdriver.common.touch_action import TouchAction from", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "and be ready for the next batch self._touch_actions = []", "self._touch_actions: actions.append(action.json_wire_gestures) if self._element is not None: return {'actions': actions,", "chaining as the spec requires. import copy from typing import", "actions = [] for action in self._touch_actions: actions.append(action.json_wire_gestures) if self._element", "WebDriver from appium.webdriver.webelement import WebElement T = TypeVar('T', bound='MultiAction') class", "#!/usr/bin/env python # Licensed under the Apache License, Version 2.0", "from typing import TYPE_CHECKING, Dict, List, Optional, TypeVar, Union from", "for action in self._touch_actions: actions.append(action.json_wire_gestures) if self._element is not None:", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "| a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release() | a2 = TouchAction(driver)", "allow for ad hoc action # chaining as the spec", "appium.webdriver.webelement import WebElement T = TypeVar('T', bound='MultiAction') class MultiAction: def", "work for this situation, and do not allow for ad", "or implied. # See the License for the specific language", "List['TouchAction'] = [] def add(self, *touch_actions: 'TouchAction') -> None: \"\"\"Add", "from appium.webdriver.mobilecommand import MobileCommand as Command if TYPE_CHECKING: from appium.webdriver.common.touch_action", "in touch_actions: if self._touch_actions is None: self._touch_actions = [] self._touch_actions.append(copy.copy(touch_action))", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "[] self._touch_actions.append(copy.copy(touch_action)) def perform(self: T) -> T: \"\"\"Perform the actions", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "\"\"\" for touch_action in touch_actions: if self._touch_actions is None: self._touch_actions", "= element self._touch_actions: List['TouchAction'] = [] def add(self, *touch_actions: 'TouchAction')", "hoc action # chaining as the spec requires. import copy", "| MultiAction(driver).add(a1, a2) Returns: `MultiAction`: Self instance \"\"\" for touch_action", "Optional['WebElement'] = None) -> None: self._driver = driver self._element =", "(the \"License\"); # you may not use this file except", "API in the form of # \"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). #", "# you may not use this file except in compliance", "for this situation, and do not allow for ad hoc", "the form of # \"action chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). 
# These do", "| a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2).perform() Returns:", "the Multi Action API in the form of # \"action", "# # Unless required by applicable law or agreed to", "from appium.webdriver.webelement import WebElement T = TypeVar('T', bound='MultiAction') class MultiAction:", "appium.webdriver.mobilecommand import MobileCommand as Command if TYPE_CHECKING: from appium.webdriver.common.touch_action import", "self._touch_actions = [] return self @property def json_wire_gestures(self) -> Dict[str,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Version 2.0 (the \"License\"); # you may not use this", "for ad hoc action # chaining as the spec requires.", "[] for action in self._touch_actions: actions.append(action.json_wire_gestures) if self._element is not", "later. Args: touch_actions: one or more TouchAction objects describing a", "permissions and # limitations under the License. # The Selenium", "a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release() | a2 = TouchAction(driver) |", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "\"\"\"Add TouchAction objects to the MultiAction, to be performed later.", "`MultiAction`: Self instance \"\"\" for touch_action in touch_actions: if self._touch_actions", "element: Optional['WebElement'] = None) -> None: self._driver = driver self._element", "import WebElement T = TypeVar('T', bound='MultiAction') class MultiAction: def __init__(self,", "Args: touch_actions: one or more TouchAction objects describing a chain", "by applicable law or agreed to in writing, software #", "driver: 'WebDriver', element: Optional['WebElement'] = None) -> None: self._driver =", "add(self, *touch_actions: 'TouchAction') -> None: \"\"\"Add TouchAction objects to the", "TouchAction from appium.webdriver.webdriver import WebDriver from appium.webdriver.webelement import WebElement T", "and do not allow for ad hoc action # chaining", "class MultiAction: def __init__(self, driver: 'WebDriver', element: Optional['WebElement'] = None)", "import copy from typing import TYPE_CHECKING, Dict, List, Optional, TypeVar,", "| a1.press(el1).move_to(el2).release() | a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1,", "typing import TYPE_CHECKING, Dict, List, Optional, TypeVar, Union from appium.webdriver.mobilecommand", "self @property def json_wire_gestures(self) -> Dict[str, Union[List, str]]: actions =", "action # chaining as the spec requires. import copy from", "the spec requires. import copy from typing import TYPE_CHECKING, Dict,", "actions stored in the object. Usage: | a1 = TouchAction(driver)", "touch_action in touch_actions: if self._touch_actions is None: self._touch_actions = []", "T: \"\"\"Perform the actions stored in the object. Usage: |", "object. 
Usage: | a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release() | a2", "bound='MultiAction') class MultiAction: def __init__(self, driver: 'WebDriver', element: Optional['WebElement'] =", "| a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2) Returns:", "one finger Usage: | a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release() |", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "import WebDriver from appium.webdriver.webelement import WebElement T = TypeVar('T', bound='MultiAction')", "None) -> None: self._driver = driver self._element = element self._touch_actions:", "Unless required by applicable law or agreed to in writing,", "clean up and be ready for the next batch self._touch_actions", "objects describing a chain of actions to be performed by", "| MultiAction(driver).add(a1, a2).perform() Returns: `MultiAction`: Self instance \"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures)", "TouchAction objects to the MultiAction, to be performed later. 
Args:", "= [] def add(self, *touch_actions: 'TouchAction') -> None: \"\"\"Add TouchAction", "the specific language governing permissions and # limitations under the", "@property def json_wire_gestures(self) -> Dict[str, Union[List, str]]: actions = []", "applicable law or agreed to in writing, software # distributed", "T) -> T: \"\"\"Perform the actions stored in the object.", "TypeVar, Union from appium.webdriver.mobilecommand import MobileCommand as Command if TYPE_CHECKING:", "in writing, software # distributed under the License is distributed", "Returns: `MultiAction`: Self instance \"\"\" self._driver.execute(Command.MULTI_ACTION, self.json_wire_gestures) # clean up", "not None: return {'actions': actions, 'elementId': self._element.id} return {'actions': actions}", "self._touch_actions: List['TouchAction'] = [] def add(self, *touch_actions: 'TouchAction') -> None:", "'WebDriver', element: Optional['WebElement'] = None) -> None: self._driver = driver", "= TypeVar('T', bound='MultiAction') class MultiAction: def __init__(self, driver: 'WebDriver', element:", "import TouchAction from appium.webdriver.webdriver import WebDriver from appium.webdriver.webelement import WebElement", "appium.webdriver.webdriver import WebDriver from appium.webdriver.webelement import WebElement T = TypeVar('T',", "Optional, TypeVar, Union from appium.webdriver.mobilecommand import MobileCommand as Command if", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "= TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2) Returns: `MultiAction`: Self", "a1.press(el1).move_to(el2).release() | a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2).perform()", "# You may obtain a copy of the License at", "describing a chain of actions to be performed by one", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", 
"a1.press(el1).move_to(el2).release() | a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2)", "WebElement T = TypeVar('T', bound='MultiAction') class MultiAction: def __init__(self, driver:", "as the spec requires. import copy from typing import TYPE_CHECKING,", "a chain of actions to be performed by one finger", "return self @property def json_wire_gestures(self) -> Dict[str, Union[List, str]]: actions", "the License for the specific language governing permissions and #", "def __init__(self, driver: 'WebDriver', element: Optional['WebElement'] = None) -> None:", "Apache License, Version 2.0 (the \"License\"); # you may not", "the MultiAction, to be performed later. Args: touch_actions: one or", "either express or implied. # See the License for the", "Union[List, str]]: actions = [] for action in self._touch_actions: actions.append(action.json_wire_gestures)", "situation, and do not allow for ad hoc action #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "do not allow for ad hoc action # chaining as", "like the Multi Action API in the form of #", "List, Optional, TypeVar, Union from appium.webdriver.mobilecommand import MobileCommand as Command", "element self._touch_actions: List['TouchAction'] = [] def add(self, *touch_actions: 'TouchAction') ->", "for touch_action in touch_actions: if self._touch_actions is None: self._touch_actions =", "for the next batch self._touch_actions = [] return self @property", "appium.webdriver.common.touch_action import TouchAction from appium.webdriver.webdriver import WebDriver from appium.webdriver.webelement import", "finger Usage: | a1 = TouchAction(driver) | a1.press(el1).move_to(el2).release() | a2", "= None) -> None: self._driver = driver self._element = element", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the next batch self._touch_actions = [] return self @property def", "| 
a2.press(el2).move_to(el1).release() | MultiAction(driver).add(a1, a2).perform() Returns: `MultiAction`: Self instance \"\"\"", "from appium.webdriver.common.touch_action import TouchAction from appium.webdriver.webdriver import WebDriver from appium.webdriver.webelement", "<reponame>salabogdan/python-client #!/usr/bin/env python # Licensed under the Apache License, Version", "this situation, and do not allow for ad hoc action", "\"License\"); # you may not use this file except in", "# clean up and be ready for the next batch", "touch_actions: one or more TouchAction objects describing a chain of", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# These do not quite work for this situation, and", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "if self._element is not None: return {'actions': actions, 'elementId': self._element.id}", "MultiAction(driver).add(a1, a2) Returns: `MultiAction`: Self instance \"\"\" for touch_action in", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "= TouchAction(driver) | a1.press(el1).move_to(el2).release() | a2 = TouchAction(driver) | a2.press(el2).move_to(el1).release()", "perform(self: T) -> T: \"\"\"Perform the actions stored in the", "You may obtain a copy of the License at #", "in self._touch_actions: actions.append(action.json_wire_gestures) if self._element is not None: return {'actions':", "if self._touch_actions is None: self._touch_actions = [] self._touch_actions.append(copy.copy(touch_action)) def perform(self:", "None: \"\"\"Add TouchAction objects to the MultiAction, to be performed", "chains\" (https://code.google.com/p/selenium/source/browse/py/selenium/webdriver/common/action_chains.py). # These do not quite work for this", "the Apache License, Version 2.0 (the \"License\"); # you may", "Selenium team implemented something like the Multi Action API in", "quite work for this situation, and do not allow for" ]
[ "= points[i, 1] p_z = points[i, 2] u = int(((p_x", "ffmpeg -r 10 -i ./filtered_{}_%d.png -vcodec mpeg4 -y {}.mp4\".format( self.output_path,", "tag := tensorboard tag epoch := tensorboard epoche store :=", "self.seq_data = seq_data self.images_path = images_path self.output_path = output_path def", "output_path def plot_points_on_image(self, seq_no, frame_no, jupyter=False, store=False, pose_type='filtered'): seq_data =", "k col2 = np.ones(y.shape[0]) * k2 plot = k3d.plot(name='points') plt_points", "np.add(points, trans[0, :]) for i in range(0, points.shape[0]): p_x =", "= dl_dict['cam_cal'][0][2] cam_fy = dl_dict['cam_cal'][0][3] for i in range(0, points.shape[0]):", "path path := != None creats the path and store", "frame['filter_pred']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type == 'final_pred_obs': #", "= copy.deepcopy(im) dl_dict = frame['dl_dict'] points = copy.deepcopy( seq_data[seq_no][0]['dl_dict']['model_points'][0, :,", "ture -> stores the image to standard path path :=", "[widht,height,RGB] points:= points of the object model [length,x,y,z] trans: [1,3]", "p_y = points[i, 1] p_z = points[i, 2] u =", "fn in range(len(self.seq_data)): self.plot_points_on_image(seq_no, fn, False, True, pose_type) if name:", "w + 1, 0] = 0 except: #print(\"out of bounds\")", "= np.dot(points, rot_mat.T) points = np.add(points, trans[0, :]) for i", "__init__(self, p_visu, writer=None): if p_visu[-1] != '/': p_visu = p_visu", "point_nr,3 \"\"\" if c == 'b': k = 245 elif", "im = np.array(Image.open( images_path + unique_desig + '.png')) # laval", ":, :]) points = np.dot(points, rot.T) points = np.add(points, t[0,", "rot = R.from_quat(rot_quat).as_matrix() else: raise Exception('Pose type not implemented.') w", ":= ture -> stores the image to standard path path", "+ tag, p_store=self.p_visu) if self.writer is not None: self.writer.add_image(tag, img_d.astype(", "import numpy as np import k3d class Visualizer(): def __init__(self,", "== 'g': k = 
25811000 elif c1 == 'r': k", "from scipy.spatial.transform import Rotation as R from helper import re_quat", "cmin:cmax, :] = c img_d[rmin:rmax, cmin_mi:cmin_ma, :] = c img_d[rmin:rmax,", "== 'r': k = 11801000 elif c == 'black': k", "import re_quat import copy import torch import numpy as np", "R from helper import re_quat import copy import torch import", "\"\"\" tag := tensorboard tag epoch := tensorboard epoche store", "- w:v + w + 1, u - w:u +", "np.array(Image.open( images_path + unique_desig[0] + '-color.png')) # ycb else: im", "tag, epoch, img, rmin=0, rmax=0, cmin=0, cmax=0, str_width=2, store=False, jupyter=False,", "rot.T) points = np.add(points, t[0, :]) cam_cx = dl_dict['cam_cal'][0][0] cam_cy", "def plot_bounding_box(self, tag, epoch, img, rmin=0, rmax=0, cmin=0, cmax=0, str_width=2,", "int(((p_y / p_z) * cam_fy) + cam_cy) try: img_d[v -", "isinstance(b, dict): rmin = b['rmin'] rmax = b['rmax'] cmin =", "tensorboard tag epoch := tensorboard epoche store := ture ->", "= k3d.points(x, col1.astype(np.uint32), point_size=point_size) plot += plt_points plt_points = k3d.points(y,", "def plot_estimated_pose(self, tag, epoch, img, points, trans=[[0, 0, 0]], rot_mat=[[1,", "if store: #store_ar = (img_d* 255).round().astype(np.uint8) #print(\"IMAGE D:\" ,img_d,img_d.shape )", "not None: self.writer.add_image(tag, img_d.astype( np.uint8), global_step=epoch, dataformats='HWC') def plot_pcd(x, point_size=0.005,", "type not implemented.') w = 2 if type(unique_desig) != str:", "b['cmin'] cmax = b['cmax'] # ToDo check Input data img_d", "seq_data self.images_path = images_path self.output_path = output_path def plot_points_on_image(self, seq_no,", "== 'ground_truth': # ground truth t = frame['dl_dict']['gt_trans'].reshape(1, 3) rot_quat", "Image.fromarray(img_d) if jupyter: display(img_disp) if store: outpath = output_path +", "+ 1, 1] = 255 img_d[v - w:v + w", "'r': k = 11801000 elif c == 'black': k =", "= 25811000 elif c1 == 'r': k = 11801000 elif", "10 -i 
./filtered_{}_%d.png -vcodec mpeg4 -y {}.mp4\".format( self.output_path, seq_no, video_name)", "points:= points of the object model [length,x,y,z] trans: [1,3] rot:", "2580 if c2 == 'b': k2 = 245 elif c2", "point_size=point_size) plot += plt_points plt_points.shader = '3d' plot.display() def plot_two_pcd(x,", ":= != None creats the path and store to it", "cmax_mi:cmax_ma, :] = c print(\"STORE\", store) img_d = img_d.astype(np.uint8) if", "* cam_fx) + cam_cx) v = int(((p_y / p_z) *", "trans: [1,3] rot: [3,3] \"\"\" img_d = copy.deepcopy(img) points =", "b['rmax'] cmin = b['cmin'] cmax = b['cmax'] # ToDo check", "str_width) rmax_mi = max(0, rmax - str_width) rmax_ma = min(img_d.shape[0],", "[0, 0, 1]], cam_cx=0, cam_cy=0, cam_fx=0, cam_fy=0, store=False, jupyter=False, w=2):", "images_path, output_path=None): self.seq_data = seq_data self.images_path = images_path self.output_path =", ":]) points = np.dot(points, rot.T) points = np.add(points, t[0, :])", "img_disp.save(outpath, \"PNG\", compress_level=1) print(\"Saved image to {}\".format(outpath)) def save_sequence(self, seq_no,", "seq_data, images_path, output_path=None): self.seq_data = seq_data self.images_path = images_path self.output_path", "not implemented.') w = 2 if type(unique_desig) != str: im", "= dl_dict['cam_cal'][0][1] cam_fx = dl_dict['cam_cal'][0][2] cam_fy = dl_dict['cam_cal'][0][3] for i", "# final pred t = np.array(frame['final_pred_obs']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy(", "str_width) img_d[rmin_mi:rmin_ma, cmin:cmax, :] = c img_d[rmax_mi:rmax_ma, cmin:cmax, :] =", "colors = np.ones(x.shape[0]) * k plot = k3d.plot(name='points') plt_points =", "= '{}_{}_{}'.format(name, pose_type, seq_no) else: video_name = '{}_{}'.format(pose_type, seq_no) cmd", "elif c2 == 'black': k2 = 2580 else: k2 =", "display(Image.fromarray(img_d)) if self.writer is not None: self.writer.add_image(tag, img_d.astype( np.uint8), global_step=epoch,", "= k3d.points(y, col2.astype(np.uint32), 
point_size=point_size) plot += plt_points plt_points.shader = '3d'", "= max(0, cmax - str_width) cmax_ma = min(img_d.shape[1], cmax +", "R.from_quat(rot_quat).as_matrix() elif pose_type == 'filtered': # filter pred t =", "k = 11801000 elif c1 == 'black': k = 2580", "path := != None creats the path and store to", "+ str_width) img_d[rmin_mi:rmin_ma, cmin:cmax, :] = c img_d[rmax_mi:rmax_ma, cmin:cmax, :]", "cam_fy) + cam_cy) try: img_d[v - w:v + w +", "rmin = b['rmin'] rmax = b['rmax'] cmin = b['cmin'] cmax", "255 img_d[v - w:v + w + 1, u -", "unique_desig[0] + '-color.png')) # ycb else: im = np.array(Image.open( images_path", "img_d = copy.deepcopy(img) points = np.dot(points, rot_mat.T) points = np.add(points,", "= int(((p_y / p_z) * cam_fy) + cam_cy) try: img_d[v", "pass img_disp = Image.fromarray(img_d) if jupyter: display(img_disp) if store: outpath", "c img_d[rmax_mi:rmax_ma, cmin:cmax, :] = c img_d[rmin:rmax, cmin_mi:cmin_ma, :] =", "img, points, trans=[[0, 0, 0]], rot_mat=[[1, 0, 0], [0, 1,", "rmin - str_width) rmin_ma = min(img_d.shape[0], rmin + str_width) rmax_mi", "+ str_width) cmax_mi = max(0, cmax - str_width) cmax_ma =", "= c print(\"STORE\", store) img_d = img_d.astype(np.uint8) if store: #store_ar", "c1 == 'black': k = 2580 else: k = 2580", "path/tag.png img:= original_image, [widht,height,RGB] points:= points of the object model", "= 0 img_d[v - w:v + w + 1, u", "= np.ones(x.shape[0]) * k col2 = np.ones(y.shape[0]) * k2 plot", "img_disp = Image.fromarray(img_d) if jupyter: display(img_disp) if store: outpath =", ":] = c img_d[rmin:rmax, cmin_mi:cmin_ma, :] = c img_d[rmin:rmax, cmax_mi:cmax_ma,", "* k col2 = np.ones(y.shape[0]) * k2 plot = k3d.plot(name='points')", "0], [0, 0, 1]], cam_cx=0, cam_cy=0, cam_fx=0, cam_fy=0, store=False, jupyter=False,", "str_width) cmin_ma = min(img_d.shape[1], cmin + str_width) cmax_mi = max(0,", "plot += plt_points plt_points = k3d.points(y, col2.astype(np.uint32), point_size=point_size) plot +=", "- w:u + w + 1, 1] = 
255 img_d[v", ",img_d,img_d.shape ) save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu) if self.writer is", "= b['cmax'] # ToDo check Input data img_d = np.array(copy.deepcopy(img))", "+ 1, 0] = 0 except: #print(\"out of bounds\") pass", "elif c1 == 'r': k = 11801000 elif c1 ==", ":]) for i in range(0, points.shape[0]): p_x = points[i, 0]", "not os.path.exists(self.p_visu): os.makedirs(self.p_visu) def plot_estimated_pose(self, tag, epoch, img, points, trans=[[0,", "dataformats='HWC') def plot_pcd(x, point_size=0.005, c='g'): \"\"\" x: point_nr,3 \"\"\" if", "plt_points = k3d.points(x, colors.astype(np.uint32), point_size=point_size) plot += plt_points plt_points.shader =", "PIL import Image from visu.helper_functions import save_image from scipy.spatial.transform import", "frame['final_pred_obs']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() else: raise Exception('Pose type not", ":] = c print(\"STORE\", store) img_d = img_d.astype(np.uint8) if store:", "np import sys import os from PIL import Image from", "range(0, points.shape[0]): p_x = points[i, 0] p_y = points[i, 1]", "[3,3] \"\"\" img_d = copy.deepcopy(img) points = np.dot(points, rot_mat.T) points", "cmax=0, str_width=2, store=False, jupyter=False, b=None): \"\"\" tag := tensorboard tag", "== 'b': k = 245 elif c1 == 'g': k", "c2 == 'black': k2 = 2580 else: k2 = 2580", "'g': k2 = 25811000 elif c2 == 'r': k2 =", "== 'black': k2 = 2580 else: k2 = 2580 col1", "output_path = self.output_path frame = seq_data[seq_no][frame_no] unique_desig = frame['dl_dict']['unique_desig'][0] if", "plt_points = k3d.points(y, col2.astype(np.uint32), point_size=point_size) plot += plt_points plt_points.shader =", "c = [0, 0, 255] rmin_mi = max(0, rmin -", "255] rmin_mi = max(0, rmin - str_width) rmin_ma = min(img_d.shape[0],", "bounds\") pass img_disp = Image.fromarray(img_d) if jupyter: display(img_disp) if store:", "x: point_nr,3 \"\"\" if c == 'b': k = 245", ") save_image(img_d, tag=str(epoch) + tag, 
p_store=self.p_visu) if self.writer is not", "points = np.dot(points, rot_mat.T) points = np.add(points, trans[0, :]) for", "elif pose_type == 'final_pred_obs': # final pred t = np.array(frame['final_pred_obs']['t']).reshape(1,", "2580 col1 = np.ones(x.shape[0]) * k col2 = np.ones(y.shape[0]) *", "store: outpath = output_path + \\ '{}_{}_{}.png'.format(pose_type, seq_no, frame_no) img_disp.save(outpath,", "+ 1, 0] = 0 except: #print(\"out of bounce\") pass", "= 0 except: #print(\"out of bounds\") pass img_disp = Image.fromarray(img_d)", "+ w + 1, 0] = 0 img_d[v - w:v", "k3d.points(x, col1.astype(np.uint32), point_size=point_size) plot += plt_points plt_points = k3d.points(y, col2.astype(np.uint32),", "self.plot_points_on_image(seq_no, fn, False, True, pose_type) if name: video_name = '{}_{}_{}'.format(name,", "in range(0, points.shape[0]): p_x = points[i, 0] p_y = points[i,", "is not None: self.writer.add_image(tag, img_d.astype( np.uint8), global_step=epoch, dataformats='HWC') def plot_pcd(x,", "images_path + unique_desig + '.png')) # laval img_d = copy.deepcopy(im)", "== 'g': k2 = 25811000 elif c2 == 'r': k2", "w:u + w + 1, 1] = 255 img_d[v -", "seq_no, pose_type='filtered', name=''): for fn in range(len(self.seq_data)): self.plot_points_on_image(seq_no, fn, False,", "!= '/': p_visu = p_visu + '/' self.p_visu = p_visu", "k2 = 11801000 elif c2 == 'black': k2 = 2580", "= 245 elif c1 == 'g': k = 25811000 elif", "c1 == 'g': k = 25811000 elif c1 == 'r':", "cmin_mi:cmin_ma, :] = c img_d[rmin:rmax, cmax_mi:cmax_ma, :] = c print(\"STORE\",", "t = np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz') rot", "display(img_disp) if store: outpath = output_path + \\ '{}_{}_{}.png'.format(pose_type, seq_no,", "range(len(self.seq_data)): self.plot_points_on_image(seq_no, fn, False, True, pose_type) if name: video_name =", "= np.ones(y.shape[0]) * k2 plot = k3d.plot(name='points') plt_points = k3d.points(x,", 
"[0, 0, 255] rmin_mi = max(0, rmin - str_width) rmin_ma", "c1='g', c2='r'): if c1 == 'b': k = 245 elif", "scipy.spatial.transform import Rotation as R from helper import re_quat import", "= b['cmin'] cmax = b['cmax'] # ToDo check Input data", "* cam_fy) + cam_cy) try: img_d[v - w:v + w", "= seq_data[seq_no][frame_no] unique_desig = frame['dl_dict']['unique_desig'][0] if pose_type == 'ground_truth': #", "plt_points = k3d.points(x, col1.astype(np.uint32), point_size=point_size) plot += plt_points plt_points =", "p_store=self.p_visu) if self.writer is not None: self.writer.add_image(tag, img_d.astype( np.uint8), global_step=epoch,", "os from PIL import Image from visu.helper_functions import save_image from", "elif c == 'r': k = 11801000 elif c ==", "b['cmax'] # ToDo check Input data img_d = np.array(copy.deepcopy(img)) c", "stores the image to standard path path := != None", "jupyter=False, b=None): \"\"\" tag := tensorboard tag epoch := tensorboard", "25811000 elif c2 == 'r': k2 = 11801000 elif c2", "as R from helper import re_quat import copy import torch", "original_image, [widht,height,RGB] \"\"\" if isinstance(b, dict): rmin = b['rmin'] rmax", "points = np.add(points, trans[0, :]) for i in range(0, points.shape[0]):", "# ToDo check Input data img_d = np.array(copy.deepcopy(img)) c =", "'black': k = 2580 else: k = 2580 if c2", "#print(\"IMAGE D:\" ,img_d,img_d.shape ) save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu) if", "img_d[rmin_mi:rmin_ma, cmin:cmax, :] = c img_d[rmax_mi:rmax_ma, cmin:cmax, :] = c", "min(img_d.shape[0], rmax + str_width) cmin_mi = max(0, cmin - str_width)", "cam_cx=0, cam_cy=0, cam_fx=0, cam_fy=0, store=False, jupyter=False, w=2): \"\"\" tag :=", "= int(((p_x / p_z) * cam_fx) + cam_cx) v =", "\"cd {} && ffmpeg -r 10 -i ./filtered_{}_%d.png -vcodec mpeg4", "#print(\"out of bounce\") pass if jupyter: display(Image.fromarray(img_d)) if store: #store_ar", "= frame['dl_dict']['unique_desig'][0] if pose_type == 'ground_truth': # ground 
truth t", "i in range(0, points.shape[0]): p_x = points[i, 0] p_y =", "video_name = '{}_{}'.format(pose_type, seq_no) cmd = \"cd {} && ffmpeg", "pose_type == 'ground_truth': # ground truth t = frame['dl_dict']['gt_trans'].reshape(1, 3)", "if type(unique_desig) != str: im = np.array(Image.open( images_path + unique_desig[0]", "save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu) if jupyter: display(Image.fromarray(img_d)) if self.writer", "p_visu[-1] != '/': p_visu = p_visu + '/' self.p_visu =", "= re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type ==", "= dl_dict['cam_cal'][0][3] for i in range(0, points.shape[0]): p_x = points[i,", "= re_quat(copy.deepcopy( frame['final_pred_obs']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() else: raise Exception('Pose", "== 'b': k2 = 245 elif c2 == 'g': k2", "store=False, pose_type='filtered'): seq_data = self.seq_data images_path = self.images_path output_path =", "cam_fx=0, cam_fy=0, store=False, jupyter=False, w=2): \"\"\" tag := tensorboard tag", "writer if not os.path.exists(self.p_visu): os.makedirs(self.p_visu) def plot_estimated_pose(self, tag, epoch, img,", "'black': k2 = 2580 else: k2 = 2580 col1 =", "trans[0, :]) for i in range(0, points.shape[0]): p_x = points[i,", "cmax - str_width) cmax_ma = min(img_d.shape[1], cmax + str_width) img_d[rmin_mi:rmin_ma,", "tag, p_store=self.p_visu) if jupyter: display(Image.fromarray(img_d)) if self.writer is not None:", "1, 0] = 0 except: #print(\"out of bounds\") pass img_disp", "rot_mat=[[1, 0, 0], [0, 1, 0], [0, 0, 1]], cam_cx=0,", "= max(0, cmin - str_width) cmin_ma = min(img_d.shape[1], cmin +", "(img_d* 255).round().astype(np.uint8) #print(\"IMAGE D:\" ,img_d,img_d.shape ) save_image(img_d, tag=str(epoch) + tag,", "rmax = b['rmax'] cmin = b['cmin'] cmax = b['cmax'] #", "= (img_d* 255).round().astype(np.uint8) #print(\"IMAGE D:\" ,img_d,img_d.shape ) save_image(img_d, tag=str(epoch) +", 
"plot_estimated_pose(self, tag, epoch, img, points, trans=[[0, 0, 0]], rot_mat=[[1, 0,", "global_step=epoch, dataformats='HWC') def plot_bounding_box(self, tag, epoch, img, rmin=0, rmax=0, cmin=0,", "= img_d.astype(np.uint8) if store: #store_ar = (img_d* 255).round().astype(np.uint8) save_image(img_d, tag=str(epoch)", "seq_no, frame_no) img_disp.save(outpath, \"PNG\", compress_level=1) print(\"Saved image to {}\".format(outpath)) def", "+ '/' self.p_visu = p_visu self.writer = writer if not", "0] = 0 except: #print(\"out of bounds\") pass img_disp =", "import sys import os from PIL import Image from visu.helper_functions", "import torch import numpy as np import k3d class Visualizer():", "pose_type == 'filtered': # filter pred t = np.array(frame['filter_pred']['t']).reshape(1, 3)", "= np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz') rot =", "numpy as np import sys import os from PIL import", "if not os.path.exists(self.p_visu): os.makedirs(self.p_visu) def plot_estimated_pose(self, tag, epoch, img, points,", "else: k2 = 2580 col1 = np.ones(x.shape[0]) * k col2", "path/tag.png img:= original_image, [widht,height,RGB] \"\"\" if isinstance(b, dict): rmin =", "3) rot_quat = re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif", "+ unique_desig + '.png')) # laval img_d = copy.deepcopy(im) dl_dict", "np.array(frame['final_pred_obs']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['final_pred_obs']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix()", "np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix()", "cmin=0, cmax=0, str_width=2, store=False, jupyter=False, b=None): \"\"\" tag := tensorboard", "tag, p_store=self.p_visu) if self.writer is not None: self.writer.add_image(tag, img_d.astype( np.uint8),", 
"max(0, cmax - str_width) cmax_ma = min(img_d.shape[1], cmax + str_width)", "True, pose_type) if name: video_name = '{}_{}_{}'.format(name, pose_type, seq_no) else:", "points[i, 2] u = int(((p_x / p_z) * cam_fx) +", "1, u - w:u + w + 1, 1] =", "+ w + 1, 0] = 0 except: #print(\"out of", "2] u = int(((p_x / p_z) * cam_fx) + cam_cx)", "+ 1, u - w:u + w + 1, 0]", "t = np.array(frame['final_pred_obs']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['final_pred_obs']['r_wxyz']), 'wxyz') rot", "point_size=0.005, c1='g', c2='r'): if c1 == 'b': k = 245", "+ cam_cy) try: img_d[v - w:v + w + 1,", "= b['rmax'] cmin = b['cmin'] cmax = b['cmax'] # ToDo", "point_size=point_size) plot += plt_points plt_points = k3d.points(y, col2.astype(np.uint32), point_size=point_size) plot", "cmin_ma = min(img_d.shape[1], cmin + str_width) cmax_mi = max(0, cmax", "img_d = copy.deepcopy(im) dl_dict = frame['dl_dict'] points = copy.deepcopy( seq_data[seq_no][0]['dl_dict']['model_points'][0,", "[1,3] rot: [3,3] \"\"\" img_d = copy.deepcopy(img) points = np.dot(points,", "from PIL import Image from visu.helper_functions import save_image from scipy.spatial.transform", "image to standard path path := != None creats the", "as np import k3d class Visualizer(): def __init__(self, p_visu, writer=None):", "np.array(Image.open( images_path + unique_desig + '.png')) # laval img_d =", "p_z = points[i, 2] u = int(((p_x / p_z) *", "'filtered': # filter pred t = np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat =", "pass if jupyter: display(Image.fromarray(img_d)) if store: #store_ar = (img_d* 255).round().astype(np.uint8)", "standard path path := != None creats the path and", "- str_width) cmax_ma = min(img_d.shape[1], cmax + str_width) img_d[rmin_mi:rmin_ma, cmin:cmax,", "bounce\") pass if jupyter: display(Image.fromarray(img_d)) if store: #store_ar = (img_d*", "= 2580 col1 = np.ones(x.shape[0]) * k col2 = np.ones(y.shape[0])", "np.dot(points, rot.T) points = np.add(points, t[0, :]) 
cam_cx = dl_dict['cam_cal'][0][0]", "+ '.png')) # laval img_d = copy.deepcopy(im) dl_dict = frame['dl_dict']", "points = np.add(points, t[0, :]) cam_cx = dl_dict['cam_cal'][0][0] cam_cy =", "it path/tag.png img:= original_image, [widht,height,RGB] points:= points of the object", "'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type == 'filtered': # filter", "= 11801000 elif c2 == 'black': k2 = 2580 else:", "pred t = np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz')", "R.from_quat(rot_quat).as_matrix() else: raise Exception('Pose type not implemented.') w = 2", "check Input data img_d = np.array(copy.deepcopy(img)) c = [0, 0,", "if self.writer is not None: self.writer.add_image(tag, img_d.astype( np.uint8), global_step=epoch, dataformats='HWC')", "np.ones(x.shape[0]) * k plot = k3d.plot(name='points') plt_points = k3d.points(x, colors.astype(np.uint32),", "def plot_pcd(x, point_size=0.005, c='g'): \"\"\" x: point_nr,3 \"\"\" if c", "= output_path def plot_points_on_image(self, seq_no, frame_no, jupyter=False, store=False, pose_type='filtered'): seq_data", "0, 255] rmin_mi = max(0, rmin - str_width) rmin_ma =", "25811000 elif c == 'r': k = 11801000 elif c", "rot: [3,3] \"\"\" img_d = copy.deepcopy(img) points = np.dot(points, rot_mat.T)", "= self.images_path output_path = self.output_path frame = seq_data[seq_no][frame_no] unique_desig =", "0, 0], [0, 1, 0], [0, 0, 1]], cam_cx=0, cam_cy=0,", "= np.dot(points, rot.T) points = np.add(points, t[0, :]) cam_cx =", "c2='r'): if c1 == 'b': k = 245 elif c1", "\"\"\" if c == 'b': k = 245 elif c", "to standard path path := != None creats the path", "def plot_two_pcd(x, y, point_size=0.005, c1='g', c2='r'): if c1 == 'b':", "original_image, [widht,height,RGB] points:= points of the object model [length,x,y,z] trans:", "helper import re_quat import copy import torch import numpy as", "seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :]) 
points = np.dot(points, rot.T) points = np.add(points,", "1]], cam_cx=0, cam_cy=0, cam_fx=0, cam_fy=0, store=False, jupyter=False, w=2): \"\"\" tag", "== 'black': k = 2580 else: k = 2580 colors", "'final_pred_obs': # final pred t = np.array(frame['final_pred_obs']['t']).reshape(1, 3) rot_quat =", "# ground truth t = frame['dl_dict']['gt_trans'].reshape(1, 3) rot_quat = re_quat(copy.deepcopy(", "min(img_d.shape[1], cmin + str_width) cmax_mi = max(0, cmax - str_width)", "- str_width) rmax_ma = min(img_d.shape[0], rmax + str_width) cmin_mi =", "try: img_d[v - w:v + w + 1, u -", "filter pred t = np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']),", "images_path = self.images_path output_path = self.output_path frame = seq_data[seq_no][frame_no] unique_desig", "k = 245 elif c == 'g': k = 25811000", "str_width) cmax_mi = max(0, cmax - str_width) cmax_ma = min(img_d.shape[1],", "jupyter=False, store=False, pose_type='filtered'): seq_data = self.seq_data images_path = self.images_path output_path", "'/' self.p_visu = p_visu self.writer = writer if not os.path.exists(self.p_visu):", "tag epoch := tensorboard epoche store := ture -> stores", "w:u + w + 1, 0] = 0 img_d[v -", "25811000 elif c1 == 'r': k = 11801000 elif c1", "/ p_z) * cam_fx) + cam_cx) v = int(((p_y /", "#store_ar = (img_d* 255).round().astype(np.uint8) #print(\"IMAGE D:\" ,img_d,img_d.shape ) save_image(img_d, tag=str(epoch)", "self.images_path = images_path self.output_path = output_path def plot_points_on_image(self, seq_no, frame_no,", "0 except: #print(\"out of bounds\") pass img_disp = Image.fromarray(img_d) if", "cmax = b['cmax'] # ToDo check Input data img_d =", "np import k3d class Visualizer(): def __init__(self, p_visu, writer=None): if", "k = 25811000 elif c == 'r': k = 11801000", "= min(img_d.shape[1], cmin + str_width) cmax_mi = max(0, cmax -", "k = 2580 if c2 == 'b': k2 = 245", "import k3d class Visualizer(): def __init__(self, 
p_visu, writer=None): if p_visu[-1]", "'/': p_visu = p_visu + '/' self.p_visu = p_visu self.writer", "k2 = 25811000 elif c2 == 'r': k2 = 11801000", "frame_no) img_disp.save(outpath, \"PNG\", compress_level=1) print(\"Saved image to {}\".format(outpath)) def save_sequence(self,", "# filter pred t = np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy(", "str: im = np.array(Image.open( images_path + unique_desig[0] + '-color.png')) #", "the path and store to it path/tag.png img:= original_image, [widht,height,RGB]", "if p_visu[-1] != '/': p_visu = p_visu + '/' self.p_visu", "u - w:u + w + 1, 0] = 0", "w + 1, 0] = 0 img_d[v - w:v +", "numpy as np import k3d class Visualizer(): def __init__(self, p_visu,", "c2 == 'g': k2 = 25811000 elif c2 == 'r':", "tag=str(epoch) + tag, p_store=self.p_visu) if jupyter: display(Image.fromarray(img_d)) if self.writer is", "unique_desig = frame['dl_dict']['unique_desig'][0] if pose_type == 'ground_truth': # ground truth", "p_visu self.writer = writer if not os.path.exists(self.p_visu): os.makedirs(self.p_visu) def plot_estimated_pose(self,", "points[i, 0] p_y = points[i, 1] p_z = points[i, 2]", "re_quat(copy.deepcopy( frame['dl_dict']['gt_rot_wxyz'][0]), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type == 'filtered':", "copy import torch import numpy as np import k3d class", "__init__(self, seq_data, images_path, output_path=None): self.seq_data = seq_data self.images_path = images_path", "self.writer.add_image(tag, img_d.astype( np.uint8), global_step=epoch, dataformats='HWC') def plot_bounding_box(self, tag, epoch, img,", "+ \\ '{}_{}_{}.png'.format(pose_type, seq_no, frame_no) img_disp.save(outpath, \"PNG\", compress_level=1) print(\"Saved image", "+ tag, p_store=self.p_visu) if jupyter: display(Image.fromarray(img_d)) if self.writer is not", "+ 1, 0] = 0 img_d[v - w:v + w", "= 25811000 elif c2 == 'r': k2 = 11801000 elif", "output_path + \\ '{}_{}_{}.png'.format(pose_type, seq_no, frame_no) 
img_disp.save(outpath, \"PNG\", compress_level=1) print(\"Saved", "of bounce\") pass if jupyter: display(Image.fromarray(img_d)) if store: #store_ar =", "Visualizer(): def __init__(self, p_visu, writer=None): if p_visu[-1] != '/': p_visu", "class SequenceVisualizer(): def __init__(self, seq_data, images_path, output_path=None): self.seq_data = seq_data", "img_d = np.array(copy.deepcopy(img)) c = [0, 0, 255] rmin_mi =", "save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu) if self.writer is not None:", "= np.array(frame['final_pred_obs']['t']).reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['final_pred_obs']['r_wxyz']), 'wxyz') rot =", "seq_data = self.seq_data images_path = self.images_path output_path = self.output_path frame", "self.output_path frame = seq_data[seq_no][frame_no] unique_desig = frame['dl_dict']['unique_desig'][0] if pose_type ==", "k plot = k3d.plot(name='points') plt_points = k3d.points(x, colors.astype(np.uint32), point_size=point_size) plot", "ground truth t = frame['dl_dict']['gt_trans'].reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['dl_dict']['gt_rot_wxyz'][0]),", "re_quat import copy import torch import numpy as np import", "= frame['dl_dict']['gt_trans'].reshape(1, 3) rot_quat = re_quat(copy.deepcopy( frame['dl_dict']['gt_rot_wxyz'][0]), 'wxyz') rot =", "= 11801000 elif c1 == 'black': k = 2580 else:", "store := ture -> stores the image to standard path", "dict): rmin = b['rmin'] rmax = b['rmax'] cmin = b['cmin']", "c1 == 'r': k = 11801000 elif c1 == 'black':", "points = np.dot(points, rot.T) points = np.add(points, t[0, :]) cam_cx", "rot_quat = re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type", "== 'filtered': # filter pred t = np.array(frame['filter_pred']['t']).reshape(1, 3) rot_quat", "0, 0]], rot_mat=[[1, 0, 0], [0, 1, 0], [0, 0,", "w + 1, 0] = 0 except: #print(\"out of bounce\")", "pose_type='filtered'): seq_data = self.seq_data images_path = 
self.images_path output_path = self.output_path", "t[0, :]) cam_cx = dl_dict['cam_cal'][0][0] cam_cy = dl_dict['cam_cal'][0][1] cam_fx =", "= output_path + \\ '{}_{}_{}.png'.format(pose_type, seq_no, frame_no) img_disp.save(outpath, \"PNG\", compress_level=1)", "img_d[rmax_mi:rmax_ma, cmin:cmax, :] = c img_d[rmin:rmax, cmin_mi:cmin_ma, :] = c", "print(\"Saved image to {}\".format(outpath)) def save_sequence(self, seq_no, pose_type='filtered', name=''): for", "np.uint8), global_step=epoch, dataformats='HWC') def plot_pcd(x, point_size=0.005, c='g'): \"\"\" x: point_nr,3", "(img_d* 255).round().astype(np.uint8) save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu) if jupyter: display(Image.fromarray(img_d))", "str_width) rmin_ma = min(img_d.shape[0], rmin + str_width) rmax_mi = max(0,", "else: video_name = '{}_{}'.format(pose_type, seq_no) cmd = \"cd {} &&", "= \"cd {} && ffmpeg -r 10 -i ./filtered_{}_%d.png -vcodec", ":]) cam_cx = dl_dict['cam_cal'][0][0] cam_cy = dl_dict['cam_cal'][0][1] cam_fx = dl_dict['cam_cal'][0][2]", "data img_d = np.array(copy.deepcopy(img)) c = [0, 0, 255] rmin_mi", "= 25811000 elif c == 'r': k = 11801000 elif", "rmin=0, rmax=0, cmin=0, cmax=0, str_width=2, store=False, jupyter=False, b=None): \"\"\" tag", "+ unique_desig[0] + '-color.png')) # ycb else: im = np.array(Image.open(", "# ycb else: im = np.array(Image.open( images_path + unique_desig +", "points.shape[0]): p_x = points[i, 0] p_y = points[i, 1] p_z", "rmax_mi = max(0, rmax - str_width) rmax_ma = min(img_d.shape[0], rmax", "-> stores the image to standard path path := !=", "c == 'b': k = 245 elif c == 'g':", "dl_dict['cam_cal'][0][0] cam_cy = dl_dict['cam_cal'][0][1] cam_fx = dl_dict['cam_cal'][0][2] cam_fy = dl_dict['cam_cal'][0][3]", "display(Image.fromarray(img_d)) if store: #store_ar = (img_d* 255).round().astype(np.uint8) #print(\"IMAGE D:\" ,img_d,img_d.shape", "- w:u + w + 1, 0] = 0 except:", "w = 2 if type(unique_desig) != str: im = np.array(Image.open(", "'r': k = 
11801000 elif c1 == 'black': k =", "'{}_{}_{}'.format(name, pose_type, seq_no) else: video_name = '{}_{}'.format(pose_type, seq_no) cmd =", "elif c2 == 'g': k2 = 25811000 elif c2 ==", "import copy import torch import numpy as np import k3d", "self.p_visu = p_visu self.writer = writer if not os.path.exists(self.p_visu): os.makedirs(self.p_visu)", "print(\"STORE\", store) img_d = img_d.astype(np.uint8) if store: #store_ar = (img_d*", "cam_fy = dl_dict['cam_cal'][0][3] for i in range(0, points.shape[0]): p_x =", "= min(img_d.shape[0], rmax + str_width) cmin_mi = max(0, cmin -", "except: #print(\"out of bounds\") pass img_disp = Image.fromarray(img_d) if jupyter:", "to it path/tag.png img:= original_image, [widht,height,RGB] \"\"\" if isinstance(b, dict):", "int(((p_x / p_z) * cam_fx) + cam_cx) v = int(((p_y", "re_quat(copy.deepcopy( frame['final_pred_obs']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() else: raise Exception('Pose type", "def __init__(self, seq_data, images_path, output_path=None): self.seq_data = seq_data self.images_path =", "cam_cy) try: img_d[v - w:v + w + 1, u", "if store: outpath = output_path + \\ '{}_{}_{}.png'.format(pose_type, seq_no, frame_no)", "k2 plot = k3d.plot(name='points') plt_points = k3d.points(x, col1.astype(np.uint32), point_size=point_size) plot", "p_z) * cam_fx) + cam_cx) v = int(((p_y / p_z)", "it path/tag.png img:= original_image, [widht,height,RGB] \"\"\" if isinstance(b, dict): rmin", "11801000 elif c1 == 'black': k = 2580 else: k", "= '{}_{}'.format(pose_type, seq_no) cmd = \"cd {} && ffmpeg -r", "for i in range(0, points.shape[0]): p_x = points[i, 0] p_y", "seq_no) cmd = \"cd {} && ffmpeg -r 10 -i", "= k3d.plot(name='points') plt_points = k3d.points(x, colors.astype(np.uint32), point_size=point_size) plot += plt_points", "- str_width) cmin_ma = min(img_d.shape[1], cmin + str_width) cmax_mi =", "pose_type) if name: video_name = '{}_{}_{}'.format(name, pose_type, seq_no) else: video_name", 
"frame['dl_dict']['gt_rot_wxyz'][0]), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type == 'filtered': #", "k = 11801000 elif c == 'black': k = 2580", "max(0, rmin - str_width) rmin_ma = min(img_d.shape[0], rmin + str_width)", "elif c1 == 'black': k = 2580 else: k =", "import save_image from scipy.spatial.transform import Rotation as R from helper", "store to it path/tag.png img:= original_image, [widht,height,RGB] \"\"\" if isinstance(b,", "cmd = \"cd {} && ffmpeg -r 10 -i ./filtered_{}_%d.png", ":] = c img_d[rmin:rmax, cmax_mi:cmax_ma, :] = c print(\"STORE\", store)", "epoche store := ture -> stores the image to standard", "+ str_width) rmax_mi = max(0, rmax - str_width) rmax_ma =", "frame['dl_dict'] points = copy.deepcopy( seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :]) points = np.dot(points,", "'b': k = 245 elif c == 'g': k =", "+= plt_points plt_points = k3d.points(y, col2.astype(np.uint32), point_size=point_size) plot += plt_points", "= np.add(points, t[0, :]) cam_cx = dl_dict['cam_cal'][0][0] cam_cy = dl_dict['cam_cal'][0][1]", "1] p_z = points[i, 2] u = int(((p_x / p_z)", "= 2580 else: k = 2580 if c2 == 'b':", "3) rot_quat = re_quat(copy.deepcopy( frame['final_pred_obs']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() else:", "in range(len(self.seq_data)): self.plot_points_on_image(seq_no, fn, False, True, pose_type) if name: video_name", "k = 2580 colors = np.ones(x.shape[0]) * k plot =", "dataformats='HWC') def plot_bounding_box(self, tag, epoch, img, rmin=0, rmax=0, cmin=0, cmax=0,", "points, trans=[[0, 0, 0]], rot_mat=[[1, 0, 0], [0, 1, 0],", "'ground_truth': # ground truth t = frame['dl_dict']['gt_trans'].reshape(1, 3) rot_quat =", "elif c1 == 'g': k = 25811000 elif c1 ==", "image to {}\".format(outpath)) def save_sequence(self, seq_no, pose_type='filtered', name=''): for fn", "== 'b': k = 245 elif c == 'g': k", "+ str_width) cmin_mi = max(0, cmin - str_width) cmin_ma =", "Rotation as R from helper import re_quat 
import copy import", "plot_bounding_box(self, tag, epoch, img, rmin=0, rmax=0, cmin=0, cmax=0, str_width=2, store=False,", "plot_points_on_image(self, seq_no, frame_no, jupyter=False, store=False, pose_type='filtered'): seq_data = self.seq_data images_path", "copy.deepcopy( seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :]) points = np.dot(points, rot.T) points =", "u - w:u + w + 1, 1] = 255", "y, point_size=0.005, c1='g', c2='r'): if c1 == 'b': k =", "rmin_mi = max(0, rmin - str_width) rmin_ma = min(img_d.shape[0], rmin", "jupyter: display(Image.fromarray(img_d)) if store: #store_ar = (img_d* 255).round().astype(np.uint8) #print(\"IMAGE D:\"", "= max(0, rmin - str_width) rmin_ma = min(img_d.shape[0], rmin +", "str_width) rmax_ma = min(img_d.shape[0], rmax + str_width) cmin_mi = max(0,", "img_d[rmin:rmax, cmin_mi:cmin_ma, :] = c img_d[rmin:rmax, cmax_mi:cmax_ma, :] = c", "1, 0], [0, 0, 1]], cam_cx=0, cam_cy=0, cam_fx=0, cam_fy=0, store=False,", "np.uint8), global_step=epoch, dataformats='HWC') def plot_bounding_box(self, tag, epoch, img, rmin=0, rmax=0,", "= 245 elif c == 'g': k = 25811000 elif", "if pose_type == 'ground_truth': # ground truth t = frame['dl_dict']['gt_trans'].reshape(1,", "None: self.writer.add_image(tag, img_d.astype( np.uint8), global_step=epoch, dataformats='HWC') def plot_pcd(x, point_size=0.005, c='g'):", "rot_mat.T) points = np.add(points, trans[0, :]) for i in range(0,", "plot = k3d.plot(name='points') plt_points = k3d.points(x, colors.astype(np.uint32), point_size=point_size) plot +=", "if c1 == 'b': k = 245 elif c1 ==", "cam_fx) + cam_cx) v = int(((p_y / p_z) * cam_fy)", "re_quat(copy.deepcopy( frame['filter_pred']['r_wxyz']), 'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type == 'final_pred_obs':", "= points[i, 2] u = int(((p_x / p_z) * cam_fx)", "0 img_d[v - w:v + w + 1, u -", "point_size=point_size) plot += plt_points plt_points.shader = '3d' plot.display() class SequenceVisualizer():", "seq_data[seq_no][frame_no] 
unique_desig = frame['dl_dict']['unique_desig'][0] if pose_type == 'ground_truth': # ground", "2580 else: k = 2580 if c2 == 'b': k2", "import Rotation as R from helper import re_quat import copy", "ycb else: im = np.array(Image.open( images_path + unique_desig + '.png'))", "class Visualizer(): def __init__(self, p_visu, writer=None): if p_visu[-1] != '/':", "!= None creats the path and store to it path/tag.png", "the object model [length,x,y,z] trans: [1,3] rot: [3,3] \"\"\" img_d", "k = 25811000 elif c1 == 'r': k = 11801000", "col1 = np.ones(x.shape[0]) * k col2 = np.ones(y.shape[0]) * k2", "= p_visu self.writer = writer if not os.path.exists(self.p_visu): os.makedirs(self.p_visu) def", "plt_points.shader = '3d' plot.display() class SequenceVisualizer(): def __init__(self, seq_data, images_path,", "import os from PIL import Image from visu.helper_functions import save_image", "frame_no, jupyter=False, store=False, pose_type='filtered'): seq_data = self.seq_data images_path = self.images_path", "k = 2580 else: k = 2580 colors = np.ones(x.shape[0])", "Exception('Pose type not implemented.') w = 2 if type(unique_desig) !=", "'.png')) # laval img_d = copy.deepcopy(im) dl_dict = frame['dl_dict'] points", "pose_type == 'final_pred_obs': # final pred t = np.array(frame['final_pred_obs']['t']).reshape(1, 3)", "if jupyter: display(img_disp) if store: outpath = output_path + \\", "laval img_d = copy.deepcopy(im) dl_dict = frame['dl_dict'] points = copy.deepcopy(", "k3d.points(x, colors.astype(np.uint32), point_size=point_size) plot += plt_points plt_points.shader = '3d' plot.display()", "the image to standard path path := != None creats", "= copy.deepcopy( seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :]) points = np.dot(points, rot.T) points", "implemented.') w = 2 if type(unique_desig) != str: im =", "= Image.fromarray(img_d) if jupyter: display(img_disp) if store: outpath = output_path", "cam_fy=0, store=False, jupyter=False, w=2): \"\"\" tag := tensorboard tag 
epoch", "cmin_mi = max(0, cmin - str_width) cmin_ma = min(img_d.shape[1], cmin", "# laval img_d = copy.deepcopy(im) dl_dict = frame['dl_dict'] points =", "= np.ones(x.shape[0]) * k plot = k3d.plot(name='points') plt_points = k3d.points(x,", "+= plt_points plt_points.shader = '3d' plot.display() class SequenceVisualizer(): def __init__(self,", "path and store to it path/tag.png img:= original_image, [widht,height,RGB] \"\"\"", "type(unique_desig) != str: im = np.array(Image.open( images_path + unique_desig[0] +", "= 2580 else: k2 = 2580 col1 = np.ones(x.shape[0]) *", "0] p_y = points[i, 1] p_z = points[i, 2] u", "writer=None): if p_visu[-1] != '/': p_visu = p_visu + '/'", "points = copy.deepcopy( seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :]) points = np.dot(points, rot.T)", "im = np.array(Image.open( images_path + unique_desig[0] + '-color.png')) # ycb", "np.dot(points, rot_mat.T) points = np.add(points, trans[0, :]) for i in", "name: video_name = '{}_{}_{}'.format(name, pose_type, seq_no) else: video_name = '{}_{}'.format(pose_type,", "p_visu = p_visu + '/' self.p_visu = p_visu self.writer =", "- str_width) rmin_ma = min(img_d.shape[0], rmin + str_width) rmax_mi =", "dl_dict['cam_cal'][0][3] for i in range(0, points.shape[0]): p_x = points[i, 0]", "def __init__(self, p_visu, writer=None): if p_visu[-1] != '/': p_visu =", "'wxyz') rot = R.from_quat(rot_quat).as_matrix() elif pose_type == 'final_pred_obs': # final", "img_d[rmin:rmax, cmax_mi:cmax_ma, :] = c print(\"STORE\", store) img_d = img_d.astype(np.uint8)", "+ w + 1, 1] = 255 img_d[v - w:v", "== 'r': k2 = 11801000 elif c2 == 'black': k2", "+ w + 1, u - w:u + w +", "= p_visu + '/' self.p_visu = p_visu self.writer = writer", "k = 245 elif c1 == 'g': k = 25811000", "D:\" ,img_d,img_d.shape ) save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu) if self.writer", "cam_cx) v = int(((p_y / p_z) * cam_fy) + cam_cy)", "store: #store_ar = (img_d* 255).round().astype(np.uint8) #print(\"IMAGE D:\" 
,img_d,img_d.shape ) save_image(img_d,", "\"PNG\", compress_level=1) print(\"Saved image to {}\".format(outpath)) def save_sequence(self, seq_no, pose_type='filtered',", "img_d = img_d.astype(np.uint8) if store: #store_ar = (img_d* 255).round().astype(np.uint8) save_image(img_d,", "c='g'): \"\"\" x: point_nr,3 \"\"\" if c == 'b': k", "= 2 if type(unique_desig) != str: im = np.array(Image.open( images_path", "tag=str(epoch) + tag, p_store=self.p_visu) if self.writer is not None: self.writer.add_image(tag,", "plot_two_pcd(x, y, point_size=0.005, c1='g', c2='r'): if c1 == 'b': k", "/ p_z) * cam_fy) + cam_cy) try: img_d[v - w:v", "images_path self.output_path = output_path def plot_points_on_image(self, seq_no, frame_no, jupyter=False, store=False,", "img_d.astype( np.uint8), global_step=epoch, dataformats='HWC') def plot_pcd(x, point_size=0.005, c='g'): \"\"\" x:", "seq_no) else: video_name = '{}_{}'.format(pose_type, seq_no) cmd = \"cd {}", "seq_no, frame_no, jupyter=False, store=False, pose_type='filtered'): seq_data = self.seq_data images_path =", "self.images_path output_path = self.output_path frame = seq_data[seq_no][frame_no] unique_desig = frame['dl_dict']['unique_desig'][0]", "2 if type(unique_desig) != str: im = np.array(Image.open( images_path +", "k2 = 2580 else: k2 = 2580 col1 = np.ones(x.shape[0])", "epoch, img, points, trans=[[0, 0, 0]], rot_mat=[[1, 0, 0], [0,", "tag, epoch, img, points, trans=[[0, 0, 0]], rot_mat=[[1, 0, 0],", "frame = seq_data[seq_no][frame_no] unique_desig = frame['dl_dict']['unique_desig'][0] if pose_type == 'ground_truth':", "#store_ar = (img_d* 255).round().astype(np.uint8) save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu) if", "if jupyter: display(Image.fromarray(img_d)) if self.writer is not None: self.writer.add_image(tag, img_d.astype(", "= k3d.points(x, colors.astype(np.uint32), point_size=point_size) plot += plt_points plt_points.shader = '3d'", "cmax + str_width) img_d[rmin_mi:rmin_ma, cmin:cmax, :] = c 
class Visualizer():
    """Renders pose / bounding-box overlays and ships them to disk,
    TensorBoard (via ``writer``) or an inline Jupyter display."""

    def __init__(self, p_visu, writer=None):
        # Normalise the target directory to a trailing '/' and create it.
        if p_visu[-1] != '/':
            p_visu = p_visu + '/'
        self.p_visu = p_visu
        self.writer = writer
        if not os.path.exists(self.p_visu):
            os.makedirs(self.p_visu)

    def plot_estimated_pose(self, tag, epoch, img, points, trans=None,
                            rot_mat=None, cam_cx=0, cam_cy=0, cam_fx=0,
                            cam_fy=0, store=False, jupyter=False, w=2):
        """Project the transformed object model into the image and mark each
        point with a green square of side ``2*w + 1``.

        tag := tensorboard tag
        epoch := tensorboard epoch
        img := original image, [height, width, RGB]
        points := object-model points, [length, 3]
        trans := [1,3] translation (default: zero translation)
        rot_mat := [3,3] rotation matrix (default: identity)
        store := True -> stores the image to self.p_visu
        jupyter := True -> shows the image inline
        w := half-width of the drawn marker square
        """
        # fix: the defaults used to be mutable nested lists, and the list
        # default crashed on `rot_mat.T` (lists have no `.T`). `None`
        # sentinels + np.asarray keep the semantics and accept plain lists.
        rot = np.identity(3) if rot_mat is None else np.asarray(rot_mat)
        tra = np.asarray([[0.0, 0.0, 0.0]] if trans is None else trans)
        img_d = copy.deepcopy(img)
        points = np.dot(points, rot.T)
        points = np.add(points, tra[0, :])
        for i in range(0, points.shape[0]):
            p_x = points[i, 0]
            p_y = points[i, 1]
            p_z = points[i, 2]
            # Pinhole projection to pixel coordinates.
            u = int(((p_x / p_z) * cam_fx) + cam_cx)
            v = int(((p_y / p_z) * cam_fy) + cam_cy)
            try:
                img_d[v - w:v + w + 1, u - w:u + w + 1, 0] = 0
                img_d[v - w:v + w + 1, u - w:u + w + 1, 1] = 255
                # fix: the original wrote channel 0 twice and never cleared
                # channel 2, so the marker was not a pure green.
                img_d[v - w:v + w + 1, u - w:u + w + 1, 2] = 0
            except Exception:
                # Point projects outside the image (or p_z ~ 0); skip it.
                pass
        if jupyter:
            # NOTE(review): `display` exists only inside IPython — confirm.
            display(Image.fromarray(img_d))
        if store:
            save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu)
        if self.writer is not None:
            self.writer.add_image(tag, img_d.astype(np.uint8),
                                  global_step=epoch, dataformats='HWC')

    def plot_bounding_box(self, tag, epoch, img, rmin=0, rmax=0, cmin=0,
                          cmax=0, str_width=2, store=False, jupyter=False,
                          b=None):
        """Draw a blue axis-aligned bounding box on a copy of ``img``.

        tag := tensorboard tag
        epoch := tensorboard epoch
        img := original image, [height, width, RGB]
        rmin/rmax/cmin/cmax := box rows/columns (overridden when `b` given)
        str_width := stroke half-width in pixels
        b := optional dict with keys 'rmin', 'rmax', 'cmin', 'cmax'
        """
        if isinstance(b, dict):
            rmin = b['rmin']
            rmax = b['rmax']
            cmin = b['cmin']
            cmax = b['cmax']
        img_d = np.array(copy.deepcopy(img))
        c = [0, 0, 255]
        # Clamp every stroke band to the image bounds.
        rmin_mi = max(0, rmin - str_width)
        rmin_ma = min(img_d.shape[0], rmin + str_width)
        rmax_mi = max(0, rmax - str_width)
        rmax_ma = min(img_d.shape[0], rmax + str_width)
        cmin_mi = max(0, cmin - str_width)
        cmin_ma = min(img_d.shape[1], cmin + str_width)
        cmax_mi = max(0, cmax - str_width)
        cmax_ma = min(img_d.shape[1], cmax + str_width)
        img_d[rmin_mi:rmin_ma, cmin:cmax, :] = c
        img_d[rmax_mi:rmax_ma, cmin:cmax, :] = c
        img_d[rmin:rmax, cmin_mi:cmin_ma, :] = c
        img_d[rmin:rmax, cmax_mi:cmax_ma, :] = c
        # fix: removed leftover debug `print("STORE", store)`.
        img_d = img_d.astype(np.uint8)
        if store:
            save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu)
        if jupyter:
            # NOTE(review): IPython-only helper — confirm.
            display(Image.fromarray(img_d))
        if self.writer is not None:
            self.writer.add_image(tag, img_d.astype(np.uint8),
                                  global_step=epoch, dataformats='HWC')
def plot_pcd(x, point_size=0.005, c='g'):
    """Render a single point cloud with k3d.

    x: point_nr,3
    c: one of 'b', 'g', 'r', 'black'; any other key falls back to 2580
    """
    # Same colour codes as before, expressed as a lookup table instead of
    # an if/elif ladder.
    palette = {'b': 245, 'g': 25811000, 'r': 11801000, 'black': 2580}
    shade = palette.get(c, 2580)
    colors = np.ones(x.shape[0]) * shade
    plot = k3d.plot(name='points')
    plt_points = k3d.points(x, colors.astype(np.uint32), point_size=point_size)
    plot += plt_points
    plt_points.shader = '3d'
    plot.display()
def plot_two_pcd(x, y, point_size=0.005, c1='g', c2='r'):
    """Overlay two point clouds in one k3d plot.

    x, y: point_nr,3 arrays
    c1, c2: colour keys ('b', 'g', 'r', 'black'; fallback 2580)
    """
    # Shared colour table replaces the two duplicated if/elif ladders.
    palette = {'b': 245, 'g': 25811000, 'r': 11801000, 'black': 2580}
    plot = k3d.plot(name='points')
    for cloud, key in ((x, c1), (y, c2)):
        col = np.ones(cloud.shape[0]) * palette.get(key, 2580)
        pts = k3d.points(cloud, col.astype(np.uint32), point_size=point_size)
        plot += pts
        # fix: the original rebound `plt_points` and only set the shader on
        # the second cloud; both clouds now render with the '3d' shader.
        pts.shader = '3d'
    plot.display()
import numpy as np
import sys
import os
from PIL import Image
from visu.helper_functions import save_image
from scipy.spatial.transform import Rotation as R
from helper import re_quat
import copy
import torch
import k3d


class SequenceVisualizer():
    """Projects object-model points onto the RGB frames of a recorded
    sequence and can render the annotated frames into an mp4 video."""

    def __init__(self, seq_data, images_path, output_path=None):
        # seq_data[seq_no][frame_no] is one frame record; each record holds
        # a 'dl_dict' plus the predicted poses.
        self.seq_data = seq_data
        self.images_path = images_path
        self.output_path = output_path

    def plot_points_on_image(self, seq_no, frame_no, jupyter=False,
                             store=False, pose_type='filtered'):
        """Overlay the model points of one frame, transformed by the chosen
        pose, onto the frame's colour image.

        pose_type := 'ground_truth' | 'filtered' | 'final_pred_obs'
        jupyter := True -> show the image inline
        store := True -> write '<pose_type>_<seq>_<frame>.png' to output_path
        """
        seq_data = self.seq_data
        images_path = self.images_path
        output_path = self.output_path
        frame = seq_data[seq_no][frame_no]
        unique_desig = frame['dl_dict']['unique_desig'][0]

        if pose_type == 'ground_truth':
            # ground truth
            t = frame['dl_dict']['gt_trans'].reshape(1, 3)
            rot_quat = re_quat(copy.deepcopy(
                frame['dl_dict']['gt_rot_wxyz'][0]), 'wxyz')
        elif pose_type == 'filtered':
            # filter pred
            t = np.array(frame['filter_pred']['t']).reshape(1, 3)
            # NOTE(review): quaternion key assumed symmetric to the
            # 'final_pred_obs' branch ('r_wxyz') — confirm against producer.
            rot_quat = re_quat(copy.deepcopy(
                frame['filter_pred']['r_wxyz']), 'wxyz')
        elif pose_type == 'final_pred_obs':
            # final pred
            t = np.array(frame['final_pred_obs']['t']).reshape(1, 3)
            rot_quat = re_quat(copy.deepcopy(
                frame['final_pred_obs']['r_wxyz']), 'wxyz')
        else:
            raise Exception('Pose type not implemented.')
        rot = R.from_quat(rot_quat).as_matrix()

        w = 2
        if type(unique_desig) != str:
            im = np.array(Image.open(
                images_path + unique_desig[0] + '-color.png'))  # ycb
        else:
            im = np.array(Image.open(
                images_path + unique_desig + '.png'))  # laval
        img_d = copy.deepcopy(im)
        dl_dict = frame['dl_dict']
        points = copy.deepcopy(
            seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :])
        points = np.dot(points, rot.T)
        points = np.add(points, t[0, :])
        cam_cx = dl_dict['cam_cal'][0][0]
        cam_cy = dl_dict['cam_cal'][0][1]
        cam_fx = dl_dict['cam_cal'][0][2]
        cam_fy = dl_dict['cam_cal'][0][3]
        for i in range(0, points.shape[0]):
            p_x = points[i, 0]
            p_y = points[i, 1]
            p_z = points[i, 2]
            # Pinhole projection into pixel coordinates.
            u = int(((p_x / p_z) * cam_fx) + cam_cx)
            v = int(((p_y / p_z) * cam_fy) + cam_cy)
            try:
                img_d[v - w:v + w + 1, u - w:u + w + 1, 0] = 0
                img_d[v - w:v + w + 1, u - w:u + w + 1, 1] = 255
                # fix: the original wrote channel 0 twice and never cleared
                # channel 2, so the marker was not a pure green.
                img_d[v - w:v + w + 1, u - w:u + w + 1, 2] = 0
            except Exception:
                # Point projects out of bounds; skip it.
                pass
        img_disp = Image.fromarray(img_d)
        if jupyter:
            # NOTE(review): `display` exists only inside IPython — confirm.
            display(img_disp)
        if store:
            outpath = output_path + \
                '{}_{}_{}.png'.format(pose_type, seq_no, frame_no)
            img_disp.save(outpath, "PNG", compress_level=1)
            print("Saved image to {}".format(outpath))

    def save_sequence(self, seq_no, pose_type='filtered', name=''):
        """Render every frame of sequence ``seq_no`` to output_path and
        assemble them into an mp4 with ffmpeg."""
        # fix: iterate the frames of this sequence; the original iterated
        # range(len(self.seq_data)), i.e. the number of *sequences*.
        for fn in range(len(self.seq_data[seq_no])):
            self.plot_points_on_image(seq_no, fn, False, True, pose_type)
        if name:
            video_name = '{}_{}_{}'.format(name, pose_type, seq_no)
        else:
            video_name = '{}_{}'.format(pose_type, seq_no)
        # fix: frames are stored as '<pose_type>_<seq>_<frame>.png'; the
        # original hard-coded './filtered_{}_%d.png' and therefore only
        # worked for pose_type == 'filtered'.
        cmd = "cd {} && ffmpeg -r 10 -i ./{}_{}_%d.png -vcodec mpeg4 -y {}.mp4".format(
            self.output_path, pose_type, seq_no, video_name)
        os.system(cmd)
'''
面试题37. 序列化二叉树
请实现两个函数,分别用来序列化和反序列化二叉树。

示例:
你可以将以下二叉树:

    1
   / \
  2   3
     / \
    4   5

序列化为 "[1,2,3,null,null,4,5]"
'''
# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

# Runtime: 240 ms, beats 22.75% of Python3 submissions
# Memory: 31 MB, beats 100.00% of Python3 submissions


class Codec:
    """Level-order (BFS) serializer/deserializer for binary trees.

    The wire format is ``str`` of a Python list, e.g. '[1, 2, None, 3]':
    interior missing children appear as None, trailing Nones are trimmed.
    """

    def __init__(self):
        # Flattened level-order values (trailing Nones trimmed).
        self.tree = []
        # Buffered Nones; flushed into self.tree only when a later
        # non-None value appears, which trims trailing Nones for free.
        self.temp = []
        # Kept for backward compatibility; not read anywhere visible.
        self.flag = 1

    def traByLayer(self, tree):
        """BFS over ``tree`` (a list used as a node queue), filling
        ``self.tree``. Iterative to avoid RecursionError on large trees
        (the original recursed once per queue element)."""
        while tree:
            node = tree.pop(0)
            if node is not None:
                # A real value arrived: flush buffered interior Nones first.
                self.tree += self.temp
                self.temp = []
                self.tree.append(node.val)
                tree.append(node.left)
                tree.append(node.right)
            else:
                self.temp.append(None)

    def serialize(self, root):
        """Encodes a tree to a single string.

        :type root: TreeNode
        :rtype: str
        """
        # fix: reset the accumulators so repeated serialize() calls on the
        # same Codec instance do not concatenate results.
        self.tree = []
        self.temp = []
        if root is None:
            return ''
        self.traByLayer([root])
        # fix: removed leftover debug print of the serialized list.
        return str(self.tree)

    def deserialize(self, data):
        """Decodes your encoded data to tree.

        :type data: str
        :rtype: TreeNode
        """
        if data == '':
            return None
        # Tokenise "[a, b, ...]" on ',' and the closing ']'; every token
        # after the first carries a leading space, hence the ' None' probe.
        start = 0
        end = 0
        tree = []
        for i in range(len(data)):
            if data[i] == ',' or data[i] == ']':
                start = end + 1
                end = i
                if data[start:end] != ' None':
                    tree.append(int(data[start:end]))
                else:
                    tree.append(None)
        root = TreeNode(tree.pop(0))
        self.buildTreeByList([root], tree)
        return root

    def buildTreeByList(self, r, data):
        """Attach the level-order values in ``data`` to the node queue ``r``
        (two children per queued node). Iterative rewrite of the original
        recursion; None values leave the child unset and off the queue."""
        while r and data:
            root = r.pop(0)
            if data:
                temp = data.pop(0)
                if temp is not None:
                    root.left = TreeNode(temp)
                    r.append(root.left)
            if data:
                temp = data.pop(0)
                if temp is not None:
                    root.right = TreeNode(temp)
                    r.append(root.right)
:type root: TreeNode :rtype: str \"\"\" if root==None:", "if temp!=None: root.left = TreeNode(temp) r.append(root.left) temp = data.pop(0) if", "root.left = TreeNode(temp) r.append(root.left) if data!=[]: temp = data.pop(0) if", "elif datalen<=2: #print(\"root\",root.val,\"tree\",data,\"datalen\",datalen) temp = data.pop(0) if temp!=None: root.left =", "= [] self.flag = 1 # non-None element appears again", ":type data: str :rtype: TreeNode \"\"\" #data = '[1, 2,", "root.right = TreeNode(temp) r.append(root.right) return else: #print(\"root\",root.val,\"tree\",data,\"datalen\",datalen) temp = data.pop(0)", "self.tree+=self.temp self.temp = [] self.tree.append(temp.val) tree.append(temp.left) tree.append(temp.right) else: self.temp.append(None) #print(\"trabylary\",self.tree)", "class Codec: def __init__(self): self.tree = [] self.temp = []", "tree.pop(0) if temp!=None: self.tree+=self.temp self.temp = [] self.tree.append(temp.val) tree.append(temp.left) tree.append(temp.right)", "= tree.pop(0) if temp!=None: self.tree+=self.temp self.temp = [] self.tree.append(temp.val) tree.append(temp.left)", "\"[1,2,3,null,null,4,5]\" ''' # Definition for a binary tree node. #", "3 / \\ 4 5 序列化为 \"[1,2,3,null,null,4,5]\" ''' # Definition", "a binary tree node. # class TreeNode(object): # def __init__(self,", "string. 
:type root: TreeNode :rtype: str \"\"\" if root==None: return", "len(data) if datalen==0: return elif datalen<=2: #print(\"root\",root.val,\"tree\",data,\"datalen\",datalen) temp = data.pop(0)", "return root def buildTreeByList(self,r:List[TreeNode], data: List[int]): if r==[] or data==[]:", "= 1 # non-None element appears again add self.temp to", "tree = [root] self.traByLayer(tree) print(str(self.tree)) return str(self.tree) def deserialize(self, data):", ":rtype: TreeNode \"\"\" #data = '[1, 2, 3, 1, 3,", "traByLayer(self, tree: List[TreeNode]): if tree==[]: return else: temp = tree.pop(0)", "temp!=None: root.right = TreeNode(temp) r.append(root.right) self.buildTreeByList(r,data) # Your Codec object", "= 0 end = 0 tree = [] for i", "= data.pop(0) if temp!=None: root.right = TreeNode(temp) r.append(root.right) self.buildTreeByList(r,data) #", ":31 MB, 在所有 Python3 提交中击败了100.00%的用户 class Codec: def __init__(self): self.tree", "#print(start,end,data[start:end]) tree.append(int(data[start:end])) else: tree.append(None) #print(\"Tree\",tree,\"then build the Tree\") root =", "0 end = 0 tree = [] for i in", "= TreeNode(temp) r.append(root.left) if data!=[]: temp = data.pop(0) if temp!=None:", "None start = 0 end = 0 tree = []", "\"\"\"Encodes a tree to a single string. 
:type root: TreeNode", "#print(\"root\",root.val,\"tree\",data,\"datalen\",datalen) temp = data.pop(0) if temp!=None: root.left = TreeNode(temp) r.append(root.left)", "self.tree.append(temp.val) tree.append(temp.left) tree.append(temp.right) else: self.temp.append(None) #print(\"trabylary\",self.tree) self.traByLayer(tree) def serialize(self, root):", "temp!=None: root.left = TreeNode(temp) r.append(root.left) if data!=[]: temp = data.pop(0)", "root.left = TreeNode(temp) r.append(root.left) temp = data.pop(0) if temp!=None: root.right", "Codec: def __init__(self): self.tree = [] self.temp = [] self.flag", "= TreeNode(tree.pop(0)) self.buildTreeByList([root],tree) return root def buildTreeByList(self,r:List[TreeNode], data: List[int]): if", "data=='': return None start = 0 end = 0 tree", "\\ 2 3 / \\ 4 5 序列化为 \"[1,2,3,null,null,4,5]\" '''", "element appears again add self.temp to self.tree def traByLayer(self, tree:", "[] self.tree.append(temp.val) tree.append(temp.left) tree.append(temp.right) else: self.temp.append(None) #print(\"trabylary\",self.tree) self.traByLayer(tree) def serialize(self,", "else: tree.append(None) #print(\"Tree\",tree,\"then build the Tree\") root = TreeNode(tree.pop(0)) self.buildTreeByList([root],tree)", "temp = data.pop(0) if temp!=None: root.right = TreeNode(temp) r.append(root.right) self.buildTreeByList(r,data)", "= TreeNode(temp) r.append(root.right) self.buildTreeByList(r,data) # Your Codec object will be", "[] self.temp = [] self.flag = 1 # non-None element", "datalen = len(data) if datalen==0: return elif datalen<=2: #print(\"root\",root.val,\"tree\",data,\"datalen\",datalen) temp", "if temp!=None: root.right = TreeNode(temp) r.append(root.right) self.buildTreeByList(r,data) # Your Codec", "if data[start:end]!=' None': #print(start,end,data[start:end]) tree.append(int(data[start:end])) else: tree.append(None) #print(\"Tree\",tree,\"then build the", "[] self.flag = 1 # non-None element appears again add", ":type root: TreeNode 
:rtype: str \"\"\" if root==None: return ''", "x # self.left = None # self.right = None #", "= None # 执行用时 :240 ms, 在所有 Python3 提交中击败了22.75%的用户 #", ":240 ms, 在所有 Python3 提交中击败了22.75%的用户 # 内存消耗 :31 MB, 在所有", "执行用时 :240 ms, 在所有 Python3 提交中击败了22.75%的用户 # 内存消耗 :31 MB,", "序列化二叉树 请实现两个函数,分别用来序列化和反序列化二叉树。 示例: 你可以将以下二叉树: 1 / \\ 2 3 /", "提交中击败了100.00%的用户 class Codec: def __init__(self): self.tree = [] self.temp =", "= x # self.left = None # self.right = None", "= i if data[start:end]!=' None': #print(start,end,data[start:end]) tree.append(int(data[start:end])) else: tree.append(None) #print(\"Tree\",tree,\"then", "# self.left = None # self.right = None # 执行用时", "self.tree = [] self.temp = [] self.flag = 1 #", "tree==[]: return else: temp = tree.pop(0) if temp!=None: self.tree+=self.temp self.temp", "if data=='': return None start = 0 end = 0", "to a single string. :type root: TreeNode :rtype: str \"\"\"", "temp = tree.pop(0) if temp!=None: self.tree+=self.temp self.temp = [] self.tree.append(temp.val)", "if temp!=None: root.left = TreeNode(temp) r.append(root.left) if data!=[]: temp =", "<reponame>FreesiaLikesPomelo/-offer ''' 面试题37. 序列化二叉树 请实现两个函数,分别用来序列化和反序列化二叉树。 示例: 你可以将以下二叉树: 1 / \\", "1, 3, 2, 4]' if data=='': return None start =", "面试题37. 序列化二叉树 请实现两个函数,分别用来序列化和反序列化二叉树。 示例: 你可以将以下二叉树: 1 / \\ 2 3", "示例: 你可以将以下二叉树: 1 / \\ 2 3 / \\ 4", "encoded data to tree. :type data: str :rtype: TreeNode \"\"\"", "self.flag = 1 # non-None element appears again add self.temp", "r.pop(0) datalen = len(data) if datalen==0: return elif datalen<=2: #print(\"root\",root.val,\"tree\",data,\"datalen\",datalen)", "\\ 4 5 序列化为 \"[1,2,3,null,null,4,5]\" ''' # Definition for a", "在所有 Python3 提交中击败了22.75%的用户 # 内存消耗 :31 MB, 在所有 Python3 提交中击败了100.00%的用户" ]
[ "\"mandatory\": ( \"dimensions\", ), \"validators\": { 1: IPUZ_WORDSEARCH_VALIDATORS, }, },", "\"http://ipuz.org/fill\": { \"mandatory\": (), \"validators\": { 1: IPUZ_FILL_VALIDATORS, }, },", "\"puzzle\", ), \"validators\": { 1: IPUZ_CROSSWORD_VALIDATORS, }, }, \"http://ipuz.org/fill\": {", "}, }, \"http://ipuz.org/crossword\": { \"mandatory\": ( \"dimensions\", \"puzzle\", ), \"validators\":", ".crossword import IPUZ_CROSSWORD_VALIDATORS from .fill import IPUZ_FILL_VALIDATORS from .sudoku import", "IPUZ_CROSSWORD_VALIDATORS, }, }, \"http://ipuz.org/fill\": { \"mandatory\": (), \"validators\": { 1:", "\"puzzle\", ), \"validators\": { 1: IPUZ_ACROSTIC_VALIDATORS, }, }, \"http://ipuz.org/answer\": {", "{ 1: IPUZ_CROSSWORD_VALIDATORS, }, }, \"http://ipuz.org/fill\": { \"mandatory\": (), \"validators\":", "}, }, \"http://ipuz.org/sudoku\": { \"mandatory\": ( \"puzzle\", ), \"validators\": {", ".fill import IPUZ_FILL_VALIDATORS from .sudoku import IPUZ_SUDOKU_VALIDATORS from .wordsearch import", "}, }, \"http://ipuz.org/fill\": { \"mandatory\": (), \"validators\": { 1: IPUZ_FILL_VALIDATORS,", "1: IPUZ_BLOCK_VALIDATORS, }, }, \"http://ipuz.org/crossword\": { \"mandatory\": ( \"dimensions\", \"puzzle\",", "{ 1: IPUZ_FILL_VALIDATORS, }, }, \"http://ipuz.org/sudoku\": { \"mandatory\": ( \"puzzle\",", "\"mandatory\": ( \"dimensions\", ), \"validators\": { 1: IPUZ_BLOCK_VALIDATORS, }, },", "import IPUZ_CROSSWORD_VALIDATORS from .fill import IPUZ_FILL_VALIDATORS from .sudoku import IPUZ_SUDOKU_VALIDATORS", "import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS = { \"http://ipuz.org/acrostic\": { \"mandatory\": ( \"puzzle\",", "{ 1: IPUZ_ANSWER_VALIDATORS, }, }, \"http://ipuz.org/block\": { \"mandatory\": ( \"dimensions\",", "IPUZ_BLOCK_VALIDATORS, }, }, \"http://ipuz.org/crossword\": { \"mandatory\": ( \"dimensions\", \"puzzle\", ),", "( \"puzzle\", ), \"validators\": { 1: IPUZ_SUDOKU_VALIDATORS, }, }, \"http://ipuz.org/wordsearch\":", "\"validators\": { 1: 
IPUZ_CROSSWORD_VALIDATORS, }, }, \"http://ipuz.org/fill\": { \"mandatory\": (),", "IPUZ_SUDOKU_VALIDATORS, }, }, \"http://ipuz.org/wordsearch\": { \"mandatory\": ( \"dimensions\", ), \"validators\":", "\"http://ipuz.org/crossword\": { \"mandatory\": ( \"dimensions\", \"puzzle\", ), \"validators\": { 1:", "1: IPUZ_FILL_VALIDATORS, }, }, \"http://ipuz.org/sudoku\": { \"mandatory\": ( \"puzzle\", ),", "from .fill import IPUZ_FILL_VALIDATORS from .sudoku import IPUZ_SUDOKU_VALIDATORS from .wordsearch", "1: IPUZ_ACROSTIC_VALIDATORS, }, }, \"http://ipuz.org/answer\": { \"mandatory\": (), \"validators\": {", "\"mandatory\": ( \"dimensions\", \"puzzle\", ), \"validators\": { 1: IPUZ_CROSSWORD_VALIDATORS, },", "1: IPUZ_SUDOKU_VALIDATORS, }, }, \"http://ipuz.org/wordsearch\": { \"mandatory\": ( \"dimensions\", ),", "\"mandatory\": ( \"puzzle\", ), \"validators\": { 1: IPUZ_ACROSTIC_VALIDATORS, }, },", "}, \"http://ipuz.org/fill\": { \"mandatory\": (), \"validators\": { 1: IPUZ_FILL_VALIDATORS, },", "IPUZ_SUDOKU_VALIDATORS from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS = { \"http://ipuz.org/acrostic\": {", ".answer import IPUZ_ANSWER_VALIDATORS from .block import IPUZ_BLOCK_VALIDATORS from .crossword import", "1: IPUZ_ANSWER_VALIDATORS, }, }, \"http://ipuz.org/block\": { \"mandatory\": ( \"dimensions\", ),", "}, \"http://ipuz.org/sudoku\": { \"mandatory\": ( \"puzzle\", ), \"validators\": { 1:", "( \"puzzle\", ), \"validators\": { 1: IPUZ_ACROSTIC_VALIDATORS, }, }, \"http://ipuz.org/answer\":", "from .answer import IPUZ_ANSWER_VALIDATORS from .block import IPUZ_BLOCK_VALIDATORS from .crossword", "), \"validators\": { 1: IPUZ_CROSSWORD_VALIDATORS, }, }, \"http://ipuz.org/fill\": { \"mandatory\":", "from .crossword import IPUZ_CROSSWORD_VALIDATORS from .fill import IPUZ_FILL_VALIDATORS from .sudoku", "(), \"validators\": { 1: IPUZ_ANSWER_VALIDATORS, }, }, \"http://ipuz.org/block\": { \"mandatory\":", "\"dimensions\", ), \"validators\": { 1: 
IPUZ_BLOCK_VALIDATORS, }, }, \"http://ipuz.org/crossword\": {", "{ \"mandatory\": (), \"validators\": { 1: IPUZ_FILL_VALIDATORS, }, }, \"http://ipuz.org/sudoku\":", ".sudoku import IPUZ_SUDOKU_VALIDATORS from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS = {", "\"http://ipuz.org/sudoku\": { \"mandatory\": ( \"puzzle\", ), \"validators\": { 1: IPUZ_SUDOKU_VALIDATORS,", "( \"dimensions\", \"puzzle\", ), \"validators\": { 1: IPUZ_CROSSWORD_VALIDATORS, }, },", "import IPUZ_BLOCK_VALIDATORS from .crossword import IPUZ_CROSSWORD_VALIDATORS from .fill import IPUZ_FILL_VALIDATORS", "IPUZ_FILL_VALIDATORS, }, }, \"http://ipuz.org/sudoku\": { \"mandatory\": ( \"puzzle\", ), \"validators\":", "{ \"mandatory\": (), \"validators\": { 1: IPUZ_ANSWER_VALIDATORS, }, }, \"http://ipuz.org/block\":", "}, }, \"http://ipuz.org/block\": { \"mandatory\": ( \"dimensions\", ), \"validators\": {", "from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS = { \"http://ipuz.org/acrostic\": { \"mandatory\":", "), \"validators\": { 1: IPUZ_SUDOKU_VALIDATORS, }, }, \"http://ipuz.org/wordsearch\": { \"mandatory\":", "{ 1: IPUZ_ACROSTIC_VALIDATORS, }, }, \"http://ipuz.org/answer\": { \"mandatory\": (), \"validators\":", "), \"validators\": { 1: IPUZ_ACROSTIC_VALIDATORS, }, }, \"http://ipuz.org/answer\": { \"mandatory\":", "}, \"http://ipuz.org/block\": { \"mandatory\": ( \"dimensions\", ), \"validators\": { 1:", "IPUZ_CROSSWORD_VALIDATORS from .fill import IPUZ_FILL_VALIDATORS from .sudoku import IPUZ_SUDOKU_VALIDATORS from", "{ 1: IPUZ_SUDOKU_VALIDATORS, }, }, \"http://ipuz.org/wordsearch\": { \"mandatory\": ( \"dimensions\",", "\"validators\": { 1: IPUZ_FILL_VALIDATORS, }, }, \"http://ipuz.org/sudoku\": { \"mandatory\": (", "{ \"mandatory\": ( \"dimensions\", \"puzzle\", ), \"validators\": { 1: IPUZ_CROSSWORD_VALIDATORS,", "\"validators\": { 1: IPUZ_ACROSTIC_VALIDATORS, }, }, \"http://ipuz.org/answer\": { \"mandatory\": (),", "IPUZ_ACROSTIC_VALIDATORS from .answer 
import IPUZ_ANSWER_VALIDATORS from .block import IPUZ_BLOCK_VALIDATORS from", "1: IPUZ_CROSSWORD_VALIDATORS, }, }, \"http://ipuz.org/fill\": { \"mandatory\": (), \"validators\": {", "\"puzzle\", ), \"validators\": { 1: IPUZ_SUDOKU_VALIDATORS, }, }, \"http://ipuz.org/wordsearch\": {", "IPUZ_BLOCK_VALIDATORS from .crossword import IPUZ_CROSSWORD_VALIDATORS from .fill import IPUZ_FILL_VALIDATORS from", "<gh_stars>1-10 from .acrostic import IPUZ_ACROSTIC_VALIDATORS from .answer import IPUZ_ANSWER_VALIDATORS from", "IPUZ_ANSWER_VALIDATORS, }, }, \"http://ipuz.org/block\": { \"mandatory\": ( \"dimensions\", ), \"validators\":", "import IPUZ_FILL_VALIDATORS from .sudoku import IPUZ_SUDOKU_VALIDATORS from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS", "\"validators\": { 1: IPUZ_ANSWER_VALIDATORS, }, }, \"http://ipuz.org/block\": { \"mandatory\": (", "}, }, \"http://ipuz.org/wordsearch\": { \"mandatory\": ( \"dimensions\", ), \"validators\": {", "IPUZ_ACROSTIC_VALIDATORS, }, }, \"http://ipuz.org/answer\": { \"mandatory\": (), \"validators\": { 1:", "IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS = { \"http://ipuz.org/acrostic\": { \"mandatory\": ( \"puzzle\", ),", "import IPUZ_ANSWER_VALIDATORS from .block import IPUZ_BLOCK_VALIDATORS from .crossword import IPUZ_CROSSWORD_VALIDATORS", ".wordsearch import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS = { \"http://ipuz.org/acrostic\": { \"mandatory\": (", "\"mandatory\": ( \"puzzle\", ), \"validators\": { 1: IPUZ_SUDOKU_VALIDATORS, }, },", "from .acrostic import IPUZ_ACROSTIC_VALIDATORS from .answer import IPUZ_ANSWER_VALIDATORS from .block", "\"http://ipuz.org/block\": { \"mandatory\": ( \"dimensions\", ), \"validators\": { 1: IPUZ_BLOCK_VALIDATORS,", "IPUZ_FILL_VALIDATORS from .sudoku import IPUZ_SUDOKU_VALIDATORS from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS", "import IPUZ_ACROSTIC_VALIDATORS from .answer import IPUZ_ANSWER_VALIDATORS from .block import IPUZ_BLOCK_VALIDATORS", 
"\"http://ipuz.org/acrostic\": { \"mandatory\": ( \"puzzle\", ), \"validators\": { 1: IPUZ_ACROSTIC_VALIDATORS,", "\"http://ipuz.org/wordsearch\": { \"mandatory\": ( \"dimensions\", ), \"validators\": { 1: IPUZ_WORDSEARCH_VALIDATORS,", "{ \"mandatory\": ( \"dimensions\", ), \"validators\": { 1: IPUZ_BLOCK_VALIDATORS, },", "(), \"validators\": { 1: IPUZ_FILL_VALIDATORS, }, }, \"http://ipuz.org/sudoku\": { \"mandatory\":", "from .block import IPUZ_BLOCK_VALIDATORS from .crossword import IPUZ_CROSSWORD_VALIDATORS from .fill", "\"http://ipuz.org/answer\": { \"mandatory\": (), \"validators\": { 1: IPUZ_ANSWER_VALIDATORS, }, },", "{ \"mandatory\": ( \"puzzle\", ), \"validators\": { 1: IPUZ_SUDOKU_VALIDATORS, },", ".block import IPUZ_BLOCK_VALIDATORS from .crossword import IPUZ_CROSSWORD_VALIDATORS from .fill import", "IPUZ_ANSWER_VALIDATORS from .block import IPUZ_BLOCK_VALIDATORS from .crossword import IPUZ_CROSSWORD_VALIDATORS from", "{ \"mandatory\": ( \"puzzle\", ), \"validators\": { 1: IPUZ_ACROSTIC_VALIDATORS, },", "\"mandatory\": (), \"validators\": { 1: IPUZ_ANSWER_VALIDATORS, }, }, \"http://ipuz.org/block\": {", "}, \"http://ipuz.org/crossword\": { \"mandatory\": ( \"dimensions\", \"puzzle\", ), \"validators\": {", "( \"dimensions\", ), \"validators\": { 1: IPUZ_BLOCK_VALIDATORS, }, }, \"http://ipuz.org/crossword\":", "}, \"http://ipuz.org/wordsearch\": { \"mandatory\": ( \"dimensions\", ), \"validators\": { 1:", "import IPUZ_SUDOKU_VALIDATORS from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS = { \"http://ipuz.org/acrostic\":", "( \"dimensions\", ), \"validators\": { 1: IPUZ_WORDSEARCH_VALIDATORS, }, }, }", "}, }, \"http://ipuz.org/answer\": { \"mandatory\": (), \"validators\": { 1: IPUZ_ANSWER_VALIDATORS,", "\"dimensions\", \"puzzle\", ), \"validators\": { 1: IPUZ_CROSSWORD_VALIDATORS, }, }, \"http://ipuz.org/fill\":", ".acrostic import IPUZ_ACROSTIC_VALIDATORS from .answer import IPUZ_ANSWER_VALIDATORS from .block import", "), \"validators\": 
{ 1: IPUZ_BLOCK_VALIDATORS, }, }, \"http://ipuz.org/crossword\": { \"mandatory\":", "{ \"mandatory\": ( \"dimensions\", ), \"validators\": { 1: IPUZ_WORDSEARCH_VALIDATORS, },", "IPUZ_PUZZLEKINDS = { \"http://ipuz.org/acrostic\": { \"mandatory\": ( \"puzzle\", ), \"validators\":", "}, \"http://ipuz.org/answer\": { \"mandatory\": (), \"validators\": { 1: IPUZ_ANSWER_VALIDATORS, },", "\"validators\": { 1: IPUZ_SUDOKU_VALIDATORS, }, }, \"http://ipuz.org/wordsearch\": { \"mandatory\": (", "= { \"http://ipuz.org/acrostic\": { \"mandatory\": ( \"puzzle\", ), \"validators\": {", "from .sudoku import IPUZ_SUDOKU_VALIDATORS from .wordsearch import IPUZ_WORDSEARCH_VALIDATORS IPUZ_PUZZLEKINDS =", "{ 1: IPUZ_BLOCK_VALIDATORS, }, }, \"http://ipuz.org/crossword\": { \"mandatory\": ( \"dimensions\",", "\"mandatory\": (), \"validators\": { 1: IPUZ_FILL_VALIDATORS, }, }, \"http://ipuz.org/sudoku\": {", "{ \"http://ipuz.org/acrostic\": { \"mandatory\": ( \"puzzle\", ), \"validators\": { 1:", "\"validators\": { 1: IPUZ_BLOCK_VALIDATORS, }, }, \"http://ipuz.org/crossword\": { \"mandatory\": (" ]
[ "request.get_json() data[\"id\"] = user_id schema = UserSchema(view=\"admin\", instance=user, partial=True) response", "UserPublicFails(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if", "count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User ID\") class UserPrivateAwards(Resource): @authed_only def get(self):", "ID\") class UserPublic(Resource): @check_account_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404()", "\"data\": response.data} @users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicSolves(Resource): @check_account_visibility @check_score_visibility", "UserPublicSolves(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if", "\"meta\": {\"count\": count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User ID\") class UserPrivateAwards(Resource): @authed_only", "password = <PASSWORD>\") clear_standings() response = schema.dump(response.data) return {\"success\": True,", "response.errors: return {\"success\": False, \"errors\": response.errors}, 400 # return {\"success\":", "@users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicFails(Resource): @check_account_visibility @check_score_visibility def get(self,", "len(response.data) return {\"success\": True, \"data\": data, \"meta\": {\"count\": count}} @users_namespace.route(\"/me/awards\")", "UserSchema(view=\"self\", instance=user, partial=True) response = schema.load(data) if response.errors: return {\"success\":", "400 db.session.add(response.data) db.session.commit() if request.args.get(\"notify\"): name = response.data.name password =", "Solves.query.filter_by(user_id=user_id).delete() 
Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close() clear_standings() return {\"success\": True} @users_namespace.route(\"/me\")", "\"user\")).dump(user) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 response.data[\"place\"]", "CTFd.schemas.users import UserSchema users_namespace = Namespace(\"users\", description=\"Endpoint to retrieve Users\")", "and is_admin() is False: abort(404) fails = user.get_fails(admin=is_admin()) view =", "UserPublic(Resource): @check_account_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned", "from flask_restplus import Namespace, Resource from CTFd.models import ( db,", "True, \"data\": response} @authed_only def patch(self): user = get_current_user() data", "Notifications, ) from CTFd.utils.decorators import authed_only, admins_only, ratelimit from CTFd.cache", "response.errors}, 400 db.session.add(response.data) db.session.commit() if request.args.get(\"notify\"): name = response.data.name password", "Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close() clear_standings() return", "Solves, Awards, Tracking, Unlocks, Submissions, Notifications, ) from CTFd.utils.decorators import", "return {\"success\": True, \"data\": response} @authed_only def patch(self): user =", "schema.load(req) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 db.session.add(response.data)", "if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 # return", "= request.get_json() schema = UserSchema(\"admin\") response = schema.load(req) if 
response.errors:", "def get(self): user = get_current_user() solves = user.get_solves(admin=True) view =", "request.get_json() schema = UserSchema(\"admin\") response = schema.load(req) if response.errors: return", "return {\"success\": True, \"data\": response.data} return {\"success\": True, \"data\": None}", "response.data} @admins_only def post(self): req = request.get_json() schema = UserSchema(\"admin\")", "if (user.banned or user.hidden) and is_admin() is False: abort(404) response", "True, \"data\": data, \"meta\": {\"count\": count}} return {\"success\": True, \"data\":", "schema.dump(response.data) db.session.close() clear_standings() return {\"success\": True, \"data\": response} @admins_only def", "session, request, abort from flask_restplus import Namespace, Resource from CTFd.models", "\"data\": response.data} @admins_only def post(self): req = request.get_json() schema =", "response = schema.load(req) if response.errors: return {\"success\": False, \"errors\": response.errors},", "check_account_visibility, check_score_visibility, ) from CTFd.schemas.submissions import SubmissionSchema from CTFd.schemas.awards import", "class UserPrivateSolves(Resource): @authed_only def get(self): user = get_current_user() solves =", "= Namespace(\"users\", description=\"Endpoint to retrieve Users\") @users_namespace.route(\"\") class UserList(Resource): @check_account_visibility", "{\"success\": True, \"data\": response} @authed_only def patch(self): user = get_current_user()", "response.errors}, 400 if is_admin(): data = response.data else: data =", "else \"admin\" response = SubmissionSchema(view=view, many=True).dump(solves) if response.errors: return {\"success\":", "fails = user.get_fails(admin=is_admin()) view = \"user\" if not is_admin() else", "abort(404) fails = user.get_fails(admin=is_admin()) view = \"user\" if not is_admin()", "response.data else: data = [] count = len(response.data) return {\"success\":", "response = schema.dump(response.data) 
db.session.close() clear_standings() return {\"success\": True, \"data\": response}", "response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublic(Resource): @check_account_visibility def get(self,", "users_namespace = Namespace(\"users\", description=\"Endpoint to retrieve Users\") @users_namespace.route(\"\") class UserList(Resource):", "user.get_awards(admin=is_admin()) view = \"user\" if not is_admin() else \"admin\" response", "get_current_user() data = request.get_json() schema = UserSchema(view=\"self\", instance=user, partial=True) response", "response = UserSchema(view=session.get(\"type\", \"user\")).dump(user) if response.errors: return {\"success\": False, \"errors\":", "Unlocks, Submissions, Notifications, ) from CTFd.utils.decorators import authed_only, admins_only, ratelimit", "len(response.data) # return {\"success\": True, \"data\": data, \"meta\": {\"count\": count}}", "is_admin() is False: abort(404) solves = user.get_solves(admin=is_admin()) view = \"user\"", "def delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete()", "get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned or user.hidden) and", "= get_current_user() solves = user.get_solves(admin=True) view = \"user\" if not", "is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(solves) if response.errors: return", "if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 db.session.commit() response", "response.errors: return {\"success\": False, \"errors\": response.errors}, 400 response.data[\"place\"] = user.place", 
"{\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource): @authed_only def get(self):", "import clear_standings from CTFd.utils.user import get_current_user, is_admin from CTFd.utils.decorators.visibility import", "= user.get_fails(admin=is_admin()) view = \"user\" if not is_admin() else \"admin\"", "many=True).dump(solves) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 return", "is False: abort(404) response = UserSchema(view=session.get(\"type\", \"user\")).dump(user) if response.errors: return", "if not is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(fails) if", "user.score return {\"success\": True, \"data\": response} @authed_only def patch(self): user", "db, Users, Solves, Awards, Tracking, Unlocks, Submissions, Notifications, ) from", "UserPrivateAwards(Resource): @authed_only def get(self): user = get_current_user() awards = user.get_awards(admin=True)", "False: abort(404) response = UserSchema(view=session.get(\"type\", \"user\")).dump(user) if response.errors: return {\"success\":", "\"data\": response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublic(Resource): @check_account_visibility def", "data, \"meta\": {\"count\": count}} return {\"success\": True, \"data\": None, \"meta\":", "class UserPublicSolves(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404()", "class UserPublicAwards(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404()", "400 response.data[\"place\"] = user.place response.data[\"score\"] = user.score return {\"success\": True,", "{\"success\": False, \"errors\": response.errors}, 400 db.session.commit() response = schema.dump(response.data) db.session.close()", "from 
CTFd.models import ( db, Users, Solves, Awards, Tracking, Unlocks,", "user = get_current_user() awards = user.get_awards(admin=True) view = \"user\" if", "count = len(response.data) return {\"success\": True, \"data\": data, \"meta\": {\"count\":", "@authed_only def get(self): user = get_current_user() fails = user.get_fails(admin=True) view", "{\"success\": False, \"errors\": response.errors}, 400 return {\"success\": True, \"data\": response.data}", "@users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource): @authed_only def get(self): user = get_current_user() solves", "import UserSchema users_namespace = Namespace(\"users\", description=\"Endpoint to retrieve Users\") @users_namespace.route(\"\")", "many=True).dump(solves) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 #", "= get_current_user() response = UserSchema(\"self\").dump(user).data response[\"place\"] = user.place response[\"score\"] =", "UserSchema(\"self\").dump(user).data response[\"place\"] = user.place response[\"score\"] = user.score return {\"success\": True,", "ID\") class UserPublicFails(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user =", "import ( check_account_visibility, check_score_visibility, ) from CTFd.schemas.submissions import SubmissionSchema from", "CTFd.schemas.awards import AwardSchema from CTFd.schemas.users import UserSchema users_namespace = Namespace(\"users\",", "False, \"errors\": response.errors}, 400 response.data[\"place\"] = user.place response.data[\"score\"] = user.score", "= response.data else: data = [] count = len(response.data) #", "data = [] count = len(response.data) # return {\"success\": True,", "@users_namespace.route(\"/me\") class UserPrivate(Resource): @authed_only def get(self): user = get_current_user() response", "if request.args.get(\"notify\"): name = response.data.name password = <PASSWORD>\") clear_standings() response", "user = get_current_user() data = 
request.get_json() schema = UserSchema(view=\"self\", instance=user,", "return {\"success\": True, \"data\": None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User ID\") class", "{\"success\": True, \"data\": response.data} @users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicSolves(Resource):", "return {\"success\": True, \"data\": response} @admins_only def delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete()", "@users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User ID\") class UserPrivateAwards(Resource): @authed_only def get(self): user", "(user.banned or user.hidden) and is_admin() is False: abort(404) fails =", "flask_restplus import Namespace, Resource from CTFd.models import ( db, Users,", "= user.get_solves(admin=True) view = \"user\" if not is_admin() else \"admin\"", "get_current_user() solves = user.get_solves(admin=True) view = \"user\" if not is_admin()", "= UserSchema(view=\"admin\", instance=user, partial=True) response = schema.load(data) if response.errors: return", "if (user.banned or user.hidden) and is_admin() is False: abort(404) solves", "= UserSchema(view=\"self\", instance=user, partial=True) response = schema.load(data) if response.errors: return", "UserSchema(view=session.get(\"type\", \"user\")).dump(user) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400", "False: abort(404) fails = user.get_fails(admin=is_admin()) view = \"user\" if not", "user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned or user.hidden) and is_admin() is", "if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 return {\"success\":", "\"User ID\") class UserPrivateAwards(Resource): @authed_only def get(self): user = get_current_user()", "clear_standings() return {\"success\": True, \"data\": response.data} 
@users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource): @authed_only", "Resource from CTFd.models import ( db, Users, Solves, Awards, Tracking,", "@users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource): @authed_only def get(self): user = get_current_user() fails", "many=True).dump(fails) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 if", "class UserList(Resource): @check_account_visibility def get(self): users = Users.query.filter_by(banned=False, hidden=False) response", "is_admin() else \"admin\" response = AwardSchema(view=view, many=True).dump(awards) if response.errors: return", "class UserPublicFails(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404()", "def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned or user.hidden)", "awards = user.get_awards(admin=True) view = \"user\" if not is_admin() else", "= schema.load(req) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400", "import session, request, abort from flask_restplus import Namespace, Resource from", "authed_only, admins_only, ratelimit from CTFd.cache import clear_standings from CTFd.utils.user import", "data[\"id\"] = user_id schema = UserSchema(view=\"admin\", instance=user, partial=True) response =", "ID\") class UserPublicSolves(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user =", "[] count = len(response.data) # return {\"success\": True, \"data\": data,", "return {\"success\": True, \"data\": None, \"meta\": {\"count\": None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\",", "\"User ID\") class UserPublicSolves(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user", "True, \"data\": response.data} @users_namespace.route(\"/<user_id>/solves\") 
@users_namespace.param(\"user_id\", \"User ID\") class UserPublicSolves(Resource): @check_account_visibility", "response} @authed_only def patch(self): user = get_current_user() data = request.get_json()", "response.errors}, 400 return {\"success\": True, \"data\": response.data} @admins_only def post(self):", "response.data} return {\"success\": True, \"data\": None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User ID\")", "\"errors\": response.errors}, 400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\",", "user_id): user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned or user.hidden) and is_admin()", "else: data = [] count = len(response.data) return {\"success\": True,", "db.session.commit() if request.args.get(\"notify\"): name = response.data.name password = <PASSWORD>\") clear_standings()", "\"data\": response} @authed_only def patch(self): user = get_current_user() data =", "data = request.get_json() data[\"id\"] = user_id schema = UserSchema(view=\"admin\", instance=user,", "response = SubmissionSchema(view=view, many=True).dump(fails) if response.errors: return {\"success\": False, \"errors\":", "response[\"score\"] = user.score return {\"success\": True, \"data\": response} @authed_only def", "count = len(response.data) # return {\"success\": True, \"data\": data, \"meta\":", "response = SubmissionSchema(view=view, many=True).dump(solves) if response.errors: return {\"success\": False, \"errors\":", "abort from flask_restplus import Namespace, Resource from CTFd.models import (", "True, \"data\": response.data} @users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource): @authed_only def get(self): user", "from CTFd.schemas.submissions import SubmissionSchema from CTFd.schemas.awards import AwardSchema from CTFd.schemas.users", "class UserPrivateAwards(Resource): @authed_only def get(self): user = 
get_current_user() awards =", "import ( db, Users, Solves, Awards, Tracking, Unlocks, Submissions, Notifications,", "{\"success\": True, \"data\": data, \"meta\": {\"count\": count}} return {\"success\": True,", "clear_standings from CTFd.utils.user import get_current_user, is_admin from CTFd.utils.decorators.visibility import (", "= Users.query.filter_by(id=user_id).first_or_404() if (user.banned or user.hidden) and is_admin() is False:", "else \"admin\" response = SubmissionSchema(view=view, many=True).dump(fails) if response.errors: return {\"success\":", "SubmissionSchema(view=view, many=True).dump(solves) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400", "400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource): @authed_only", "is_admin from CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility, ) from CTFd.schemas.submissions", "get_current_user() awards = user.get_awards(admin=True) view = \"user\" if not is_admin()", "\"data\": response} @admins_only def delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete()", "import get_current_user, is_admin from CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility, )", "= response.data else: data = [] count = len(response.data) return", "CTFd.utils.user import get_current_user, is_admin from CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility,", "response = schema.dump(response.data) db.session.close() clear_standings() return {\"success\": True, \"data\": response.data}", "None, \"meta\": {\"count\": None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User ID or 'me'\")", "False: abort(404) 
solves = user.get_solves(admin=is_admin()) view = \"user\" if not", "(user.banned or user.hidden) and is_admin() is False: abort(404) response =", "False, \"errors\": response.errors}, 400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<user_id>/solves\")", "and is_admin() is False: abort(404) solves = user.get_solves(admin=is_admin()) view =", "response.data} @users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource): @authed_only def get(self): user = get_current_user()", "description=\"Endpoint to retrieve Users\") @users_namespace.route(\"\") class UserList(Resource): @check_account_visibility def get(self):", "= [] count = len(response.data) # return {\"success\": True, \"data\":", "response.errors}, 400 # return {\"success\": True, \"data\": response.data} return {\"success\":", "Users, Solves, Awards, Tracking, Unlocks, Submissions, Notifications, ) from CTFd.utils.decorators", "get(self): user = get_current_user() fails = user.get_fails(admin=True) view = \"user\"", "import authed_only, admins_only, ratelimit from CTFd.cache import clear_standings from CTFd.utils.user", "return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource): @authed_only def", "False, \"errors\": response.errors}, 400 return {\"success\": True, \"data\": response.data} @admins_only", "def patch(self): user = get_current_user() data = request.get_json() schema =", "UserPrivateSolves(Resource): @authed_only def get(self): user = get_current_user() solves = user.get_solves(admin=True)", "= SubmissionSchema(view=view, many=True).dump(fails) if response.errors: return {\"success\": False, \"errors\": response.errors},", "db.session.close() clear_standings() return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource):", "user.hidden) and is_admin() is False: abort(404) awards = user.get_awards(admin=is_admin()) view", 
"@users_namespace.param(\"user_id\", \"User ID\") class UserPublicSolves(Resource): @check_account_visibility @check_score_visibility def get(self, user_id):", "= schema.dump(response.data) return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User", "\"errors\": response.errors}, 400 response.data[\"place\"] = user.place response.data[\"score\"] = user.score return", "Namespace(\"users\", description=\"Endpoint to retrieve Users\") @users_namespace.route(\"\") class UserList(Resource): @check_account_visibility def", "\"User ID or 'me'\") class UserPublicAwards(Resource): @check_account_visibility @check_score_visibility def get(self,", "Namespace, Resource from CTFd.models import ( db, Users, Solves, Awards,", "{\"count\": count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User ID\") class UserPrivateAwards(Resource): @authed_only def", "Awards, Tracking, Unlocks, Submissions, Notifications, ) from CTFd.utils.decorators import authed_only,", "if is_admin(): data = response.data else: data = [] count", "= user_id schema = UserSchema(view=\"admin\", instance=user, partial=True) response = schema.load(data)", "= request.get_json() data[\"id\"] = user_id schema = UserSchema(view=\"admin\", instance=user, partial=True)", "if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 db.session.add(response.data) db.session.commit()", "\"meta\": {\"count\": count}} return {\"success\": True, \"data\": None, \"meta\": {\"count\":", "user.get_solves(admin=is_admin()) view = \"user\" if not is_admin() else \"admin\" response", "{\"success\": False, \"errors\": response.errors}, 400 db.session.add(response.data) db.session.commit() if request.args.get(\"notify\"): name", "AwardSchema from CTFd.schemas.users import UserSchema users_namespace = Namespace(\"users\", description=\"Endpoint to", "response.errors: return {\"success\": False, 
\"errors\": response.errors}, 400 return {\"success\": True,", "True, \"data\": response.data} @admins_only def post(self): req = request.get_json() schema", "is_admin(): data = response.data else: data = [] count =", "if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 response.data[\"place\"] =", "\"data\": None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicFails(Resource): @check_account_visibility @check_score_visibility", "\"errors\": response.errors}, 400 if is_admin(): data = response.data else: data", "schema = UserSchema(\"admin\") response = schema.load(req) if response.errors: return {\"success\":", "@admins_only def post(self): req = request.get_json() schema = UserSchema(\"admin\") response", "user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close()", "\"user\" if not is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(solves)", "response = UserSchema(\"self\").dump(user).data response[\"place\"] = user.place response[\"score\"] = user.score return", "or user.hidden) and is_admin() is False: abort(404) response = UserSchema(view=session.get(\"type\",", "ratelimit from CTFd.cache import clear_standings from CTFd.utils.user import get_current_user, is_admin", "True, \"data\": response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublic(Resource): @check_account_visibility", "Users.query.filter_by(banned=False, hidden=False) response = UserSchema(view=\"user\", many=True).dump(users) if response.errors: return {\"success\":", 
"response = schema.dump(response.data) return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\",", "if not is_admin() else \"admin\" response = AwardSchema(view=view, many=True).dump(awards) if", "# return {\"success\": True, \"data\": data, \"meta\": {\"count\": count}} return", "if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 if is_admin():", "{\"success\": True, \"data\": response.data} @admins_only def post(self): req = request.get_json()", "Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close() clear_standings() return {\"success\":", "False, \"errors\": response.errors}, 400 if is_admin(): data = response.data else:", "400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\", \"User ID\")", "\"meta\": {\"count\": None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User ID or 'me'\") class", "<filename>CTFd/api/v1/users.py from flask import session, request, abort from flask_restplus import", "admins_only, ratelimit from CTFd.cache import clear_standings from CTFd.utils.user import get_current_user,", "from CTFd.utils.decorators import authed_only, admins_only, ratelimit from CTFd.cache import clear_standings", "{\"count\": None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User ID or 'me'\") class UserPublicAwards(Resource):", "False, \"errors\": response.errors}, 400 # return {\"success\": True, \"data\": response.data}", "else \"admin\" response = AwardSchema(view=view, many=True).dump(awards) if response.errors: return {\"success\":", "@authed_only def get(self): user 
= get_current_user() response = UserSchema(\"self\").dump(user).data response[\"place\"]", "get(self): user = get_current_user() awards = user.get_awards(admin=True) view = \"user\"", "@admins_only def patch(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() data = request.get_json()", "Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close() clear_standings() return {\"success\": True} @users_namespace.route(\"/me\") class UserPrivate(Resource):", "not is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(fails) if response.errors:", "name = response.data.name password = <PASSWORD>\") clear_standings() response = schema.dump(response.data)", "user = get_current_user() solves = user.get_solves(admin=True) view = \"user\" if", "solves = user.get_solves(admin=is_admin()) view = \"user\" if not is_admin() else", "def get(self): user = get_current_user() response = UserSchema(\"self\").dump(user).data response[\"place\"] =", "def get(self): users = Users.query.filter_by(banned=False, hidden=False) response = UserSchema(view=\"user\", many=True).dump(users)", "or 'me'\") class UserPublicAwards(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user", "from CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility, ) from CTFd.schemas.submissions import", "req = request.get_json() schema = UserSchema(\"admin\") response = schema.load(req) if", "@users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicSolves(Resource): @check_account_visibility @check_score_visibility def get(self,", "view = \"user\" if not is_admin() else \"admin\" response =", "= get_current_user() awards = user.get_awards(admin=True) view = \"user\" if not", "to retrieve Users\") @users_namespace.route(\"\") class UserList(Resource): @check_account_visibility def get(self): users", 
"Users.query.filter_by(id=user_id).first_or_404() if (user.banned or user.hidden) and is_admin() is False: abort(404)", "= UserSchema(view=\"user\", many=True).dump(users) if response.errors: return {\"success\": False, \"errors\": response.errors},", "{\"success\": False, \"errors\": response.errors}, 400 if is_admin(): data = response.data", "400 db.session.commit() response = schema.dump(response.data) db.session.close() clear_standings() return {\"success\": True,", "True} @users_namespace.route(\"/me\") class UserPrivate(Resource): @authed_only def get(self): user = get_current_user()", "schema.dump(response.data) return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User ID\")", "= user.get_awards(admin=is_admin()) view = \"user\" if not is_admin() else \"admin\"", ") from CTFd.schemas.submissions import SubmissionSchema from CTFd.schemas.awards import AwardSchema from", "@authed_only def patch(self): user = get_current_user() data = request.get_json() schema", "\"admin\" response = AwardSchema(view=view, many=True).dump(awards) if response.errors: return {\"success\": False,", "abort(404) awards = user.get_awards(admin=is_admin()) view = \"user\" if not is_admin()", "\"data\": response.data} @users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource): @authed_only def get(self): user =", "True, \"data\": None, \"meta\": {\"count\": None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User ID", "\"errors\": response.errors}, 400 return {\"success\": True, \"data\": response.data} @admins_only def", "is_admin() is False: abort(404) response = UserSchema(view=session.get(\"type\", \"user\")).dump(user) if response.errors:", "data = response.data else: data = [] count = len(response.data)", "from CTFd.schemas.users import UserSchema users_namespace = Namespace(\"users\", description=\"Endpoint to retrieve", "= len(response.data) # 
return {\"success\": True, \"data\": data, \"meta\": {\"count\":", "schema.dump(response.data) db.session.close() clear_standings() return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/solves\") class", "{\"success\": False, \"errors\": response.errors}, 400 response.data[\"place\"] = user.place response.data[\"score\"] =", "False, \"errors\": response.errors}, 400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/fails\")", "db.session.add(response.data) db.session.commit() if request.args.get(\"notify\"): name = response.data.name password = <PASSWORD>\")", "UserPrivate(Resource): @authed_only def get(self): user = get_current_user() response = UserSchema(\"self\").dump(user).data", "( db, Users, Solves, Awards, Tracking, Unlocks, Submissions, Notifications, )", "get(self): users = Users.query.filter_by(banned=False, hidden=False) response = UserSchema(view=\"user\", many=True).dump(users) if", "Users.query.filter_by(id=user_id).first_or_404() data = request.get_json() data[\"id\"] = user_id schema = UserSchema(view=\"admin\",", "instance=user, partial=True) response = schema.load(data) if response.errors: return {\"success\": False,", "is_admin() is False: abort(404) fails = user.get_fails(admin=is_admin()) view = \"user\"", "{\"success\": True, \"data\": data, \"meta\": {\"count\": count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User", "CTFd.schemas.submissions import SubmissionSchema from CTFd.schemas.awards import AwardSchema from CTFd.schemas.users import", "get_current_user() response = UserSchema(\"self\").dump(user).data response[\"place\"] = user.place response[\"score\"] = user.score", "= [] count = len(response.data) return {\"success\": True, \"data\": data,", "response.data[\"score\"] = user.score return {\"success\": True, \"data\": response.data} @admins_only def", "def get(self): user = get_current_user() fails = user.get_fails(admin=True) view =", 
"from CTFd.utils.user import get_current_user, is_admin from CTFd.utils.decorators.visibility import ( check_account_visibility,", "get(self): user = get_current_user() response = UserSchema(\"self\").dump(user).data response[\"place\"] = user.place", "response.errors}, 400 response.data[\"place\"] = user.place response.data[\"score\"] = user.score return {\"success\":", "\"user\" if not is_admin() else \"admin\" response = AwardSchema(view=view, many=True).dump(awards)", "return {\"success\": False, \"errors\": response.errors}, 400 if is_admin(): data =", "= user.get_awards(admin=True) view = \"user\" if not is_admin() else \"admin\"", "or user.hidden) and is_admin() is False: abort(404) fails = user.get_fails(admin=is_admin())", "@admins_only def delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete()", "\"admin\" response = SubmissionSchema(view=view, many=True).dump(solves) if response.errors: return {\"success\": False,", "@users_namespace.param(\"user_id\", \"User ID\") class UserPrivateAwards(Resource): @authed_only def get(self): user =", "True, \"data\": response} @admins_only def delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete()", "response.errors}, 400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource):", "users = Users.query.filter_by(banned=False, hidden=False) response = UserSchema(view=\"user\", many=True).dump(users) if response.errors:", "None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User ID or 'me'\") class UserPublicAwards(Resource): 
@check_account_visibility", "and is_admin() is False: abort(404) awards = user.get_awards(admin=is_admin()) view =", "@users_namespace.param(\"user_id\", \"User ID\") class UserPublicFails(Resource): @check_account_visibility @check_score_visibility def get(self, user_id):", "\"errors\": response.errors}, 400 db.session.commit() response = schema.dump(response.data) db.session.close() clear_standings() return", "patch(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() data = request.get_json() data[\"id\"] =", "{\"success\": True, \"data\": None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicFails(Resource):", "response = AwardSchema(view=view, many=True).dump(awards) if response.errors: return {\"success\": False, \"errors\":", "# return {\"success\": True, \"data\": response.data} return {\"success\": True, \"data\":", "user.place response[\"score\"] = user.score return {\"success\": True, \"data\": response} @authed_only", "True, \"data\": None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicFails(Resource): @check_account_visibility", "\"data\": response.data} @admins_only def patch(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() data", "response.data} @users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource): @authed_only def get(self): user = get_current_user()", "= UserSchema(\"admin\") response = schema.load(req) if response.errors: return {\"success\": False,", "return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User ID\") class", "response.data} @admins_only def patch(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() data =", "@users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User ID or 'me'\") class UserPublicAwards(Resource): 
@check_account_visibility @check_score_visibility", "user.get_solves(admin=True) view = \"user\" if not is_admin() else \"admin\" response", "= \"user\" if not is_admin() else \"admin\" response = AwardSchema(view=view,", "{\"success\": False, \"errors\": response.errors}, 400 # return {\"success\": True, \"data\":", "db.session.commit() response = schema.dump(response.data) db.session.close() clear_standings() return {\"success\": True, \"data\":", "= user.get_fails(admin=True) view = \"user\" if not is_admin() else \"admin\"", "return {\"success\": True, \"data\": response.data} @admins_only def patch(self, user_id): user", "Tracking, Unlocks, Submissions, Notifications, ) from CTFd.utils.decorators import authed_only, admins_only,", "\"User ID\") class UserPublicFails(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user", "response.data else: data = [] count = len(response.data) # return", "check_score_visibility, ) from CTFd.schemas.submissions import SubmissionSchema from CTFd.schemas.awards import AwardSchema", "\"errors\": response.errors}, 400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/fails\") class", "user.hidden) and is_admin() is False: abort(404) fails = user.get_fails(admin=is_admin()) view", "400 if is_admin(): data = response.data else: data = []", "request, abort from flask_restplus import Namespace, Resource from CTFd.models import", "schema.load(data) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 db.session.commit()", "retrieve Users\") @users_namespace.route(\"\") class UserList(Resource): @check_account_visibility def get(self): users =", "class UserPublic(Resource): @check_account_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if", "\"data\": response.data} return {\"success\": True, \"data\": None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User", 
"False: abort(404) awards = user.get_awards(admin=is_admin()) view = \"user\" if not", "response.errors: return {\"success\": False, \"errors\": response.errors}, 400 if is_admin(): data", "Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close() clear_standings() return {\"success\": True} @users_namespace.route(\"/me\") class", "and is_admin() is False: abort(404) response = UserSchema(view=session.get(\"type\", \"user\")).dump(user) if", "data = [] count = len(response.data) return {\"success\": True, \"data\":", "= Users.query.filter_by(banned=False, hidden=False) response = UserSchema(view=\"user\", many=True).dump(users) if response.errors: return", "return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource): @authed_only def", "user.get_fails(admin=is_admin()) view = \"user\" if not is_admin() else \"admin\" response", "abort(404) solves = user.get_solves(admin=is_admin()) view = \"user\" if not is_admin()", "= AwardSchema(view=view, many=True).dump(awards) if response.errors: return {\"success\": False, \"errors\": response.errors},", "if (user.banned or user.hidden) and is_admin() is False: abort(404) fails", "False, \"errors\": response.errors}, 400 db.session.add(response.data) db.session.commit() if request.args.get(\"notify\"): name =", "False, \"errors\": response.errors}, 400 db.session.commit() response = schema.dump(response.data) db.session.close() clear_standings()", "\"errors\": response.errors}, 400 # return {\"success\": True, \"data\": response.data} return", "class UserPrivateFails(Resource): @authed_only def get(self): user = get_current_user() fails =", "return {\"success\": True, \"data\": response.data} @admins_only def post(self): req =", "= SubmissionSchema(view=view, many=True).dump(solves) if response.errors: return {\"success\": False, \"errors\": response.errors},", "return {\"success\": False, 
\"errors\": response.errors}, 400 return {\"success\": True, \"data\":", "response.errors}, 400 return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\", \"User", "else: data = [] count = len(response.data) # return {\"success\":", "return {\"success\": True} @users_namespace.route(\"/me\") class UserPrivate(Resource): @authed_only def get(self): user", ") from CTFd.utils.decorators import authed_only, admins_only, ratelimit from CTFd.cache import", "get_current_user() fails = user.get_fails(admin=True) view = \"user\" if not is_admin()", "user = Users.query.filter_by(id=user_id).first_or_404() data = request.get_json() data[\"id\"] = user_id schema", "@authed_only def get(self): user = get_current_user() solves = user.get_solves(admin=True) view", "get_current_user, is_admin from CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility, ) from", "response = schema.load(data) if response.errors: return {\"success\": False, \"errors\": response.errors},", "user.hidden) and is_admin() is False: abort(404) solves = user.get_solves(admin=is_admin()) view", "partial=True) response = schema.load(data) if response.errors: return {\"success\": False, \"errors\":", "'me'\") class UserPublicAwards(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user =", "{\"success\": True, \"data\": response} @admins_only def delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete()", "[] count = len(response.data) return {\"success\": True, \"data\": data, \"meta\":", "= request.get_json() schema = UserSchema(view=\"self\", instance=user, partial=True) response = schema.load(data)", "response.errors}, 400 db.session.commit() response = schema.dump(response.data) db.session.close() clear_standings() return {\"success\":", "\"admin\" response = SubmissionSchema(view=view, 
many=True).dump(fails) if response.errors: return {\"success\": False,", "many=True).dump(users) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 return", "import AwardSchema from CTFd.schemas.users import UserSchema users_namespace = Namespace(\"users\", description=\"Endpoint", "= response.data.name password = <PASSWORD>\") clear_standings() response = schema.dump(response.data) return", "Submissions, Notifications, ) from CTFd.utils.decorators import authed_only, admins_only, ratelimit from", "@users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublic(Resource): @check_account_visibility def get(self, user_id):", "{\"success\": True, \"data\": response.data} @users_namespace.route(\"/<int:user_id>\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublic(Resource):", "post(self): req = request.get_json() schema = UserSchema(\"admin\") response = schema.load(req)", "= UserSchema(view=session.get(\"type\", \"user\")).dump(user) if response.errors: return {\"success\": False, \"errors\": response.errors},", "= user.score return {\"success\": True, \"data\": response} @authed_only def patch(self):", "ID\") class UserPrivateAwards(Resource): @authed_only def get(self): user = get_current_user() awards", "(user.banned or user.hidden) and is_admin() is False: abort(404) awards =", "{\"success\": True, \"data\": None, \"meta\": {\"count\": None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User", "user = get_current_user() response = UserSchema(\"self\").dump(user).data response[\"place\"] = user.place response[\"score\"]", "@authed_only def get(self): user = get_current_user() awards = user.get_awards(admin=True) view", "not is_admin() else \"admin\" response = AwardSchema(view=view, many=True).dump(awards) if response.errors:", "= user.get_solves(admin=is_admin()) view = \"user\" if not is_admin() else \"admin\"", 
"@users_namespace.param(\"user_id\", \"User ID\") class UserPublic(Resource): @check_account_visibility def get(self, user_id): user", "@check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned or", "CTFd.models import ( db, Users, Solves, Awards, Tracking, Unlocks, Submissions,", "response[\"place\"] = user.place response[\"score\"] = user.score return {\"success\": True, \"data\":", "class UserPrivate(Resource): @authed_only def get(self): user = get_current_user() response =", "SubmissionSchema(view=view, many=True).dump(fails) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400", "AwardSchema(view=view, many=True).dump(awards) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400", "count}} return {\"success\": True, \"data\": None, \"meta\": {\"count\": None}} @users_namespace.route(\"/<user_id>/awards\")", "\"data\": data, \"meta\": {\"count\": count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User ID\") class", "UserSchema(\"admin\") response = schema.load(req) if response.errors: return {\"success\": False, \"errors\":", "response.data[\"place\"] = user.place response.data[\"score\"] = user.score return {\"success\": True, \"data\":", "is False: abort(404) awards = user.get_awards(admin=is_admin()) view = \"user\" if", "user.score return {\"success\": True, \"data\": response.data} @admins_only def patch(self, user_id):", "@check_account_visibility @check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned", "is False: abort(404) fails = user.get_fails(admin=is_admin()) view = \"user\" if", "response.errors: return {\"success\": False, \"errors\": response.errors}, 400 db.session.add(response.data) db.session.commit() if", "user.place response.data[\"score\"] = user.score return {\"success\": True, \"data\": response.data} @admins_only", "= 
schema.load(data) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400", "UserPublicAwards(Resource): @check_account_visibility @check_score_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if", "response = UserSchema(view=\"user\", many=True).dump(users) if response.errors: return {\"success\": False, \"errors\":", "delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit()", "is_admin() is False: abort(404) awards = user.get_awards(admin=is_admin()) view = \"user\"", "request.args.get(\"notify\"): name = response.data.name password = <PASSWORD>\") clear_standings() response =", "(user.banned or user.hidden) and is_admin() is False: abort(404) solves =", "= schema.dump(response.data) db.session.close() clear_standings() return {\"success\": True, \"data\": response} @admins_only", "{\"success\": True} @users_namespace.route(\"/me\") class UserPrivate(Resource): @authed_only def get(self): user =", "UserSchema(view=\"admin\", instance=user, partial=True) response = schema.load(data) if response.errors: return {\"success\":", "Users\") @users_namespace.route(\"\") class UserList(Resource): @check_account_visibility def get(self): users = Users.query.filter_by(banned=False,", "= \"user\" if not is_admin() else \"admin\" response = SubmissionSchema(view=view,", "True, \"data\": response.data} return {\"success\": True, \"data\": None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\",", "patch(self): user = get_current_user() data = request.get_json() schema = UserSchema(view=\"self\",", "or user.hidden) and is_admin() is False: abort(404) 
solves = user.get_solves(admin=is_admin())", "True, \"data\": response.data} @admins_only def patch(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404()", "True, \"data\": response.data} @users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource): @authed_only def get(self): user", "\"user\" if not is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(fails)", "@users_namespace.param(\"user_id\", \"User ID or 'me'\") class UserPublicAwards(Resource): @check_account_visibility @check_score_visibility def", "response} @admins_only def delete(self, user_id): Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete()", "= user.score return {\"success\": True, \"data\": response.data} @admins_only def patch(self,", "data = request.get_json() schema = UserSchema(view=\"self\", instance=user, partial=True) response =", "= user.place response.data[\"score\"] = user.score return {\"success\": True, \"data\": response.data}", "= Users.query.filter_by(id=user_id).first_or_404() data = request.get_json() data[\"id\"] = user_id schema =", "return {\"success\": False, \"errors\": response.errors}, 400 # return {\"success\": True,", "= get_current_user() fails = user.get_fails(admin=True) view = \"user\" if not", "user_id): user = Users.query.filter_by(id=user_id).first_or_404() data = request.get_json() data[\"id\"] = user_id", "= schema.dump(response.data) db.session.close() clear_standings() return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/solves\")", "user.get_fails(admin=True) view = \"user\" if not is_admin() else \"admin\" response", "if not is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(solves) if", "from CTFd.schemas.awards import AwardSchema from 
CTFd.schemas.users import UserSchema users_namespace =", "CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility, ) from CTFd.schemas.submissions import SubmissionSchema", "@check_account_visibility def get(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() if (user.banned or", "def get(self): user = get_current_user() awards = user.get_awards(admin=True) view =", "@check_account_visibility def get(self): users = Users.query.filter_by(banned=False, hidden=False) response = UserSchema(view=\"user\",", "flask import session, request, abort from flask_restplus import Namespace, Resource", "{\"count\": count}} return {\"success\": True, \"data\": None, \"meta\": {\"count\": None}}", "if (user.banned or user.hidden) and is_admin() is False: abort(404) awards", "UserList(Resource): @check_account_visibility def get(self): users = Users.query.filter_by(banned=False, hidden=False) response =", "hidden=False) response = UserSchema(view=\"user\", many=True).dump(users) if response.errors: return {\"success\": False,", "CTFd.utils.decorators import authed_only, admins_only, ratelimit from CTFd.cache import clear_standings from", "ID or 'me'\") class UserPublicAwards(Resource): @check_account_visibility @check_score_visibility def get(self, user_id):", "= user.place response[\"score\"] = user.score return {\"success\": True, \"data\": response}", "schema = UserSchema(view=\"admin\", instance=user, partial=True) response = schema.load(data) if response.errors:", "{\"success\": True, \"data\": response.data} @users_namespace.route(\"/me/fails\") class UserPrivateFails(Resource): @authed_only def get(self):", "\"User ID\") class UserPublic(Resource): @check_account_visibility def get(self, user_id): user =", "def patch(self, user_id): user = Users.query.filter_by(id=user_id).first_or_404() data = request.get_json() data[\"id\"]", "<PASSWORD>\") clear_standings() response = schema.dump(response.data) return {\"success\": 
True, \"data\": response.data}", "return {\"success\": True, \"data\": data, \"meta\": {\"count\": count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\",", "user.get_awards(admin=True) view = \"user\" if not is_admin() else \"admin\" response", "clear_standings() return {\"success\": True} @users_namespace.route(\"/me\") class UserPrivate(Resource): @authed_only def get(self):", "user_id schema = UserSchema(view=\"admin\", instance=user, partial=True) response = schema.load(data) if", "Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close() clear_standings() return {\"success\": True}", "is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(fails) if response.errors: return", "or user.hidden) and is_admin() is False: abort(404) awards = user.get_awards(admin=is_admin())", "schema = UserSchema(view=\"self\", instance=user, partial=True) response = schema.load(data) if response.errors:", "400 # return {\"success\": True, \"data\": response.data} return {\"success\": True,", "awards = user.get_awards(admin=is_admin()) view = \"user\" if not is_admin() else", "\"data\": response.data} @users_namespace.route(\"/me/solves\") class UserPrivateSolves(Resource): @authed_only def get(self): user =", "None} @users_namespace.route(\"/<user_id>/fails\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicFails(Resource): @check_account_visibility @check_score_visibility def", "response.data} @users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\", \"User ID\") class UserPublicSolves(Resource): @check_account_visibility @check_score_visibility def", "{\"success\": True, \"data\": response.data} return {\"success\": True, \"data\": None} @users_namespace.route(\"/<user_id>/fails\")", "return {\"success\": True, 
\"data\": data, \"meta\": {\"count\": count}} return {\"success\":", "clear_standings() return {\"success\": True, \"data\": response} @admins_only def delete(self, user_id):", "from flask import session, request, abort from flask_restplus import Namespace,", "is False: abort(404) solves = user.get_solves(admin=is_admin()) view = \"user\" if", "@users_namespace.route(\"\") class UserList(Resource): @check_account_visibility def get(self): users = Users.query.filter_by(banned=False, hidden=False)", "def post(self): req = request.get_json() schema = UserSchema(\"admin\") response =", "import Namespace, Resource from CTFd.models import ( db, Users, Solves,", "( check_account_visibility, check_score_visibility, ) from CTFd.schemas.submissions import SubmissionSchema from CTFd.schemas.awards", "return {\"success\": False, \"errors\": response.errors}, 400 db.session.add(response.data) db.session.commit() if request.args.get(\"notify\"):", "db.session.close() clear_standings() return {\"success\": True, \"data\": response} @admins_only def delete(self,", "many=True).dump(awards) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 return", "user.hidden) and is_admin() is False: abort(404) response = UserSchema(view=session.get(\"type\", \"user\")).dump(user)", "UserPrivateFails(Resource): @authed_only def get(self): user = get_current_user() fails = user.get_fails(admin=True)", "UserSchema users_namespace = Namespace(\"users\", description=\"Endpoint to retrieve Users\") @users_namespace.route(\"\") class", "data, \"meta\": {\"count\": count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User ID\") class UserPrivateAwards(Resource):", "UserSchema(view=\"user\", many=True).dump(users) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400", "SubmissionSchema from CTFd.schemas.awards import AwardSchema from CTFd.schemas.users import UserSchema users_namespace", "= get_current_user() 
data = request.get_json() schema = UserSchema(view=\"self\", instance=user, partial=True)", "from CTFd.cache import clear_standings from CTFd.utils.user import get_current_user, is_admin from", "get(self): user = get_current_user() solves = user.get_solves(admin=True) view = \"user\"", "return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<user_id>/solves\") @users_namespace.param(\"user_id\", \"User ID\") class", "response.data.name password = <PASSWORD>\") clear_standings() response = schema.dump(response.data) return {\"success\":", "\"data\": None, \"meta\": {\"count\": None}} @users_namespace.route(\"/<user_id>/awards\") @users_namespace.param(\"user_id\", \"User ID or", "response.errors: return {\"success\": False, \"errors\": response.errors}, 400 db.session.commit() response =", "= len(response.data) return {\"success\": True, \"data\": data, \"meta\": {\"count\": count}}", "\"errors\": response.errors}, 400 db.session.add(response.data) db.session.commit() if request.args.get(\"notify\"): name = response.data.name", "= UserSchema(\"self\").dump(user).data response[\"place\"] = user.place response[\"score\"] = user.score return {\"success\":", "return {\"success\": False, \"errors\": response.errors}, 400 response.data[\"place\"] = user.place response.data[\"score\"]", "solves = user.get_solves(admin=True) view = \"user\" if not is_admin() else", "{\"success\": True, \"data\": response.data} @admins_only def patch(self, user_id): user =", "= <PASSWORD>\") clear_standings() response = schema.dump(response.data) return {\"success\": True, \"data\":", "request.get_json() schema = UserSchema(view=\"self\", instance=user, partial=True) response = schema.load(data) if", "many=True).dump(awards) if response.errors: return {\"success\": False, \"errors\": response.errors}, 400 #", "True, \"data\": data, \"meta\": {\"count\": count}} @users_namespace.route(\"/me/awards\") @users_namespace.param(\"user_id\", \"User ID\")", "abort(404) 
response = UserSchema(view=session.get(\"type\", \"user\")).dump(user) if response.errors: return {\"success\": False,", "db.session.close() clear_standings() return {\"success\": True} @users_namespace.route(\"/me\") class UserPrivate(Resource): @authed_only def", "clear_standings() response = schema.dump(response.data) return {\"success\": True, \"data\": response.data} @users_namespace.route(\"/<int:user_id>\")", "import SubmissionSchema from CTFd.schemas.awards import AwardSchema from CTFd.schemas.users import UserSchema", "fails = user.get_fails(admin=True) view = \"user\" if not is_admin() else", "db.session.commit() db.session.close() clear_standings() return {\"success\": True} @users_namespace.route(\"/me\") class UserPrivate(Resource): @authed_only", "Notifications.query.filter_by(user_id=user_id).delete() Awards.query.filter_by(user_id=user_id).delete() Unlocks.query.filter_by(user_id=user_id).delete() Submissions.query.filter_by(user_id=user_id).delete() Solves.query.filter_by(user_id=user_id).delete() Tracking.query.filter_by(user_id=user_id).delete() Users.query.filter_by(id=user_id).delete() db.session.commit() db.session.close() clear_standings()", "400 return {\"success\": True, \"data\": response.data} @admins_only def post(self): req", "return {\"success\": False, \"errors\": response.errors}, 400 db.session.commit() response = schema.dump(response.data)", "not is_admin() else \"admin\" response = SubmissionSchema(view=view, many=True).dump(solves) if response.errors:", "user = get_current_user() fails = user.get_fails(admin=True) view = \"user\" if", "\"data\": data, \"meta\": {\"count\": count}} return {\"success\": True, \"data\": None,", "CTFd.cache import clear_standings from CTFd.utils.user import get_current_user, is_admin from CTFd.utils.decorators.visibility" ]
[ "few pages. n_pages = 5 for i in range(n_pages): page", "page_name(i), prefix=str(i)) # Setup main app layout. app_shell = [html.H1(\"App", "n_pages = 5 for i in range(n_pages): page = make_page(i)", "page.layout = html.Div([html.H2(f\"Page {i}\"), html.Button('Click me!', id='btn'), html.Div(id='log')]) @page.callback(Output('log', 'children'),", "page_name(i: int): return f\"page{i}\" def make_page(i: int): page = DashBlueprint()", "= DashBlueprint() page.layout = html.Div([html.H2(f\"Page {i}\"), html.Button('Click me!', id='btn'), html.Div(id='log')])", "me!', id='btn'), html.Div(id='log')]) @page.callback(Output('log', 'children'), Input('btn', 'n_clicks')) def on_click(n_clicks): return", "pages. n_pages = 5 for i in range(n_pages): page =", "return page app = DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) # Register a few", "# Register a few pages. n_pages = 5 for i", "html.Div(id='log')]) @page.callback(Output('log', 'children'), Input('btn', 'n_clicks')) def on_click(n_clicks): return f\"Hello world", "app_shell = [html.H1(\"App shell\"), dl.plugins.page_container] navigation = html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for", "= make_page(i) page.register(app, page_name(i), prefix=str(i)) # Setup main app layout.", "dl from dash_extensions.enrich import DashBlueprint, DashProxy, html, Output, Input def", "shell\"), dl.plugins.page_container] navigation = html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for i in range(n_pages)])", "<filename>getting_started/pages.py import dash_labs as dl from dash_extensions.enrich import DashBlueprint, DashProxy,", "import DashBlueprint, DashProxy, html, Output, Input def page_name(i: int): return", "range(n_pages)]) app.layout = html.Div(app_shell + [navigation], style=dict(display=\"block\")) if __name__ ==", "def page_name(i: int): return f\"page{i}\" def make_page(i: int): page =", "Register a few pages. 
n_pages = 5 for i in", "on_click(n_clicks): return f\"Hello world {n_clicks} from page {i}!\" return page", "for i in range(n_pages): page = make_page(i) page.register(app, page_name(i), prefix=str(i))", "@page.callback(Output('log', 'children'), Input('btn', 'n_clicks')) def on_click(n_clicks): return f\"Hello world {n_clicks}", "world {n_clicks} from page {i}!\" return page app = DashProxy(prevent_initial_callbacks=True,", "i in range(n_pages)]) app.layout = html.Div(app_shell + [navigation], style=dict(display=\"block\")) if", "dash_labs as dl from dash_extensions.enrich import DashBlueprint, DashProxy, html, Output,", "html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for i in range(n_pages)]) app.layout = html.Div(app_shell +", "prefix=str(i)) # Setup main app layout. app_shell = [html.H1(\"App shell\"),", "= html.Div(app_shell + [navigation], style=dict(display=\"block\")) if __name__ == '__main__': app.run_server()", "# Setup main app layout. app_shell = [html.H1(\"App shell\"), dl.plugins.page_container]", "html.Button('Click me!', id='btn'), html.Div(id='log')]) @page.callback(Output('log', 'children'), Input('btn', 'n_clicks')) def on_click(n_clicks):", "Setup main app layout. app_shell = [html.H1(\"App shell\"), dl.plugins.page_container] navigation", "f\"page{i}\" def make_page(i: int): page = DashBlueprint() page.layout = html.Div([html.H2(f\"Page", "a few pages. n_pages = 5 for i in range(n_pages):", "DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) # Register a few pages. n_pages = 5", "= DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) # Register a few pages. 
n_pages =", "i in range(n_pages): page = make_page(i) page.register(app, page_name(i), prefix=str(i)) #", "page = make_page(i) page.register(app, page_name(i), prefix=str(i)) # Setup main app", "page = DashBlueprint() page.layout = html.Div([html.H2(f\"Page {i}\"), html.Button('Click me!', id='btn'),", "Output, Input def page_name(i: int): return f\"page{i}\" def make_page(i: int):", "in range(n_pages): page = make_page(i) page.register(app, page_name(i), prefix=str(i)) # Setup", "= [html.H1(\"App shell\"), dl.plugins.page_container] navigation = html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for i", "make_page(i) page.register(app, page_name(i), prefix=str(i)) # Setup main app layout. app_shell", "DashProxy, html, Output, Input def page_name(i: int): return f\"page{i}\" def", "= html.Div([html.H2(f\"Page {i}\"), html.Button('Click me!', id='btn'), html.Div(id='log')]) @page.callback(Output('log', 'children'), Input('btn',", "Input def page_name(i: int): return f\"page{i}\" def make_page(i: int): page", "page {i}!\" return page app = DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) # Register", "app layout. 
app_shell = [html.H1(\"App shell\"), dl.plugins.page_container] navigation = html.Ul([html.Li(html.A(page_name(i),", "make_page(i: int): page = DashBlueprint() page.layout = html.Div([html.H2(f\"Page {i}\"), html.Button('Click", "def on_click(n_clicks): return f\"Hello world {n_clicks} from page {i}!\" return", "app.layout = html.Div(app_shell + [navigation], style=dict(display=\"block\")) if __name__ == '__main__':", "def make_page(i: int): page = DashBlueprint() page.layout = html.Div([html.H2(f\"Page {i}\"),", "[html.H1(\"App shell\"), dl.plugins.page_container] navigation = html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for i in", "DashBlueprint, DashProxy, html, Output, Input def page_name(i: int): return f\"page{i}\"", "return f\"page{i}\" def make_page(i: int): page = DashBlueprint() page.layout =", "{i}\"), html.Button('Click me!', id='btn'), html.Div(id='log')]) @page.callback(Output('log', 'children'), Input('btn', 'n_clicks')) def", "as dl from dash_extensions.enrich import DashBlueprint, DashProxy, html, Output, Input", "f\"Hello world {n_clicks} from page {i}!\" return page app =", "from page {i}!\" return page app = DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) #", "app = DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) # Register a few pages. 
n_pages", "= html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for i in range(n_pages)]) app.layout = html.Div(app_shell", "= 5 for i in range(n_pages): page = make_page(i) page.register(app,", "5 for i in range(n_pages): page = make_page(i) page.register(app, page_name(i),", "page app = DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) # Register a few pages.", "in range(n_pages)]) app.layout = html.Div(app_shell + [navigation], style=dict(display=\"block\")) if __name__", "{i}!\" return page app = DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages]) # Register a", "DashBlueprint() page.layout = html.Div([html.H2(f\"Page {i}\"), html.Button('Click me!', id='btn'), html.Div(id='log')]) @page.callback(Output('log',", "dash_extensions.enrich import DashBlueprint, DashProxy, html, Output, Input def page_name(i: int):", "layout. app_shell = [html.H1(\"App shell\"), dl.plugins.page_container] navigation = html.Ul([html.Li(html.A(page_name(i), href=page_name(i)))", "plugins=[dl.plugins.pages]) # Register a few pages. n_pages = 5 for", "id='btn'), html.Div(id='log')]) @page.callback(Output('log', 'children'), Input('btn', 'n_clicks')) def on_click(n_clicks): return f\"Hello", "dl.plugins.page_container] navigation = html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for i in range(n_pages)]) app.layout", "Input('btn', 'n_clicks')) def on_click(n_clicks): return f\"Hello world {n_clicks} from page", "{n_clicks} from page {i}!\" return page app = DashProxy(prevent_initial_callbacks=True, plugins=[dl.plugins.pages])", "html, Output, Input def page_name(i: int): return f\"page{i}\" def make_page(i:", "range(n_pages): page = make_page(i) page.register(app, page_name(i), prefix=str(i)) # Setup main", "main app layout. 
app_shell = [html.H1(\"App shell\"), dl.plugins.page_container] navigation =", "return f\"Hello world {n_clicks} from page {i}!\" return page app", "int): page = DashBlueprint() page.layout = html.Div([html.H2(f\"Page {i}\"), html.Button('Click me!',", "import dash_labs as dl from dash_extensions.enrich import DashBlueprint, DashProxy, html,", "for i in range(n_pages)]) app.layout = html.Div(app_shell + [navigation], style=dict(display=\"block\"))", "int): return f\"page{i}\" def make_page(i: int): page = DashBlueprint() page.layout", "href=page_name(i))) for i in range(n_pages)]) app.layout = html.Div(app_shell + [navigation],", "html.Div([html.H2(f\"Page {i}\"), html.Button('Click me!', id='btn'), html.Div(id='log')]) @page.callback(Output('log', 'children'), Input('btn', 'n_clicks'))", "page.register(app, page_name(i), prefix=str(i)) # Setup main app layout. app_shell =", "'children'), Input('btn', 'n_clicks')) def on_click(n_clicks): return f\"Hello world {n_clicks} from", "'n_clicks')) def on_click(n_clicks): return f\"Hello world {n_clicks} from page {i}!\"", "navigation = html.Ul([html.Li(html.A(page_name(i), href=page_name(i))) for i in range(n_pages)]) app.layout =", "from dash_extensions.enrich import DashBlueprint, DashProxy, html, Output, Input def page_name(i:" ]
[ "'limit': 'param/messages_limit', 'echo': 'param/echo', 'include_claimed': 'param/include_claimed', }, 'hints': { 'allow':", "{ 'href-template': '/v2/queues/{queue_name}/messages', 'href-vars': { 'queue_name': 'param/queue_name', }, 'hints': {", "'limit': 'param/queue_limit', 'detailed': 'param/detailed', }, 'hints': { 'allow': ['GET'], 'formats':", "{ 'allow': ['POST'], 'formats': { 'application/json': {}, }, 'accept-post': ['application/json'],", "on_get(self, req, resp, project_id): resp.data = self.document_utf8 resp.content_type = 'application/json-home'", "}, }, 'rel/post_claim': { 'href-template': '/v2/queues/{queue_name}/claims{?limit}', 'href-vars': { 'queue_name': 'param/queue_name',", "\"License\"); you may not # use this file except in", "indent=4) self.document_utf8 = document.encode('utf-8') def on_get(self, req, resp, project_id): resp.data", "{ 'allow': [ 'DELETE' ], 'formats': { 'application/json': {} }", "'allow': [ 'DELETE' ], 'formats': { 'application/json': {} } }", "'/v2/queues/{queue_name}', 'href-vars': { 'queue_name': 'param/queue_name', }, 'hints': { 'allow': ['PUT',", "permissions and limitations under # the License. import json #", "'/v2/queues/{queue_name}/messages', 'href-vars': { 'queue_name': 'param/queue_name', }, 'hints': { 'allow': ['POST'],", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "['GET'], 'formats': { 'application/json': {}, }, }, }, 'rel/flavor': {", "'limit': 'param/claim_limit', }, 'hints': { 'allow': ['POST'], 'formats': { 'application/json':", "}, }, } } ADMIN_RESOURCES = { # ----------------------------------------------------------------- #", "resp.content_type = 'application/json-home' resp.cache_control = ['max-age=86400'] # status defaults to", "'href-vars': { 'detailed': 'param/detailed', 'limit': 'param/pool_limit', 'marker': 'param/marker', }, 'hints':", "the License. 
import json # NOTE(kgriffs): http://tools.ietf.org/html/draft-nottingham-json-home-03 JSON_HOME = {", "}, 'hints': { 'allow': ['GET'], 'formats': { 'application/json': {}, },", "} ADMIN_RESOURCES = { # ----------------------------------------------------------------- # Pools # -----------------------------------------------------------------", "'/v2/queues/{queue_name}/messages/{message_id}{?claim}', # noqa 'href-vars': { 'queue_name': 'param/queue_name', 'message_id': 'param/message_id', 'claim':", "}, 'accept-post': ['application/json'] }, }, 'rel/patch_claim': { 'href-template': '/v2/queues/{queue_name}/claims/{claim_id}', 'href-vars':", "# Messages # ----------------------------------------------------------------- 'rel/messages': { 'href-template': ('/v2/queues/{queue_name}/messages' '{?marker,limit,echo,include_claimed}'), 'href-vars':", "{ 'application/json': {}, }, }, }, 'rel/flavor': { 'href-template': '/v2/flavors/{flavor_name}',", "may not # use this file except in compliance with", "Messages # ----------------------------------------------------------------- 'rel/messages': { 'href-template': ('/v2/queues/{queue_name}/messages' '{?marker,limit,echo,include_claimed}'), 'href-vars': {", "{ # ----------------------------------------------------------------- # Queues # ----------------------------------------------------------------- 'rel/queues': { 'href-template':", "'formats': { 'application/json': {}, }, }, }, # ----------------------------------------------------------------- #", "}, }, }, } } ADMIN_RESOURCES = { # -----------------------------------------------------------------", "{ 'application/json': {}, }, 'accept-post': ['application/json'] }, }, 'rel/delete_claim': {", "'href-template': '/v2/queues/{queue_name}/claims/{claim_id}', 'href-vars': { 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id', }, 'hints':", "for the specific language governing permissions and limitations under #", "may obtain a copy # of the License at #", 
"'/v2/queues/{queue_name}/claims/{claim_id}', 'href-vars': { 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id', }, 'hints': {", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "resp, project_id): resp.data = self.document_utf8 resp.content_type = 'application/json-home' resp.cache_control =", "JSON_HOME = { 'resources': { # ----------------------------------------------------------------- # Queues #", "'formats': { 'application/json': {}, }, }, }, 'rel/queue': { 'href-template':", "'claim': 'param/claim_id' }, 'hints': { 'allow': [ 'DELETE' ], 'formats':", "a copy # of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "'allow': ['POST'], 'formats': { 'application/json': {}, }, 'accept-post': ['application/json'], },", "under the License is distributed on an \"AS IS\" BASIS,", "resp.data = self.document_utf8 resp.content_type = 'application/json-home' resp.cache_control = ['max-age=86400'] #", "'queue_name': 'param/queue_name', 'limit': 'param/claim_limit', }, 'hints': { 'allow': ['POST'], 'formats':", "{ 'href-template': '/v2/pools{?detailed,limit,marker}', 'href-vars': { 'detailed': 'param/detailed', 'limit': 'param/pool_limit', 'marker':", "----------------------------------------------------------------- # Messages # ----------------------------------------------------------------- 'rel/messages': { 'href-template': ('/v2/queues/{queue_name}/messages' '{?marker,limit,echo,include_claimed}'),", "'param/include_claimed', }, 'hints': { 'allow': ['GET'], 'formats': { 'application/json': {},", "'param/claim_id' }, 'hints': { 'allow': [ 'DELETE' ], 'formats': {", "License. 
# NOTE(kgriffs): http://tools.ietf.org/html/draft-nottingham-json-home-03
# JSON Home document advertising the public v2 endpoints. Each entry maps a
# link relation ('rel/...') to its URI template, the template variables
# ('href-vars'), and hints: the allowed HTTP methods and supported media
# types. Key order is preserved by json.dumps, so it determines the byte
# layout of the rendered document — do not reorder entries casually.
JSON_HOME = {
    'resources': {
        # -----------------------------------------------------------------
        # Queues
        # -----------------------------------------------------------------
        'rel/queues': {
            'href-template': '/v2/queues{?marker,limit,detailed}',
            'href-vars': {
                'marker': 'param/marker',
                'limit': 'param/queue_limit',
                'detailed': 'param/detailed',
            },
            'hints': {
                'allow': ['GET'],
                'formats': {
                    'application/json': {},
                },
            },
        },
        'rel/queue': {
            'href-template': '/v2/queues/{queue_name}',
            'href-vars': {
                'queue_name': 'param/queue_name',
            },
            'hints': {
                'allow': ['PUT', 'DELETE'],
                'formats': {
                    'application/json': {},
                },
            },
        },
        'rel/queue_stats': {
            'href-template': '/v2/queues/{queue_name}/stats',
            'href-vars': {
                'queue_name': 'param/queue_name',
            },
            'hints': {
                'allow': ['GET'],
                'formats': {
                    'application/json': {},
                },
            },
        },

        # -----------------------------------------------------------------
        # Messages
        # -----------------------------------------------------------------
        'rel/messages': {
            'href-template': ('/v2/queues/{queue_name}/messages'
                              '{?marker,limit,echo,include_claimed}'),
            'href-vars': {
                'queue_name': 'param/queue_name',
                'marker': 'param/marker',
                'limit': 'param/messages_limit',
                'echo': 'param/echo',
                'include_claimed': 'param/include_claimed',
            },
            'hints': {
                'allow': ['GET'],
                'formats': {
                    'application/json': {},
                },
            },
        },
        'rel/post_messages': {
            'href-template': '/v2/queues/{queue_name}/messages',
            'href-vars': {
                'queue_name': 'param/queue_name',
            },
            'hints': {
                'allow': ['POST'],
                'formats': {
                    'application/json': {},
                },
                'accept-post': ['application/json'],
            },
        },
        'rel/messages_delete': {
            'href-template': '/v2/queues/{queue_name}/messages{?ids,pop}',
            'href-vars': {
                'queue_name': 'param/queue_name',
                'ids': 'param/ids',
                'pop': 'param/pop'
            },
            'hints': {
                'allow': [
                    'DELETE'
                ],
                'formats': {
                    'application/json': {}
                }
            }
        },
        'rel/message_delete': {
            'href-template': '/v2/queues/{queue_name}/messages/{message_id}{?claim}',  # noqa
            'href-vars': {
                'queue_name': 'param/queue_name',
                'message_id': 'param/message_id',
                'claim': 'param/claim_id'
            },
            'hints': {
                'allow': [
                    'DELETE'
                ],
                'formats': {
                    'application/json': {}
                }
            }
        },

        # -----------------------------------------------------------------
        # Claims
        # -----------------------------------------------------------------
        'rel/claim': {
            'href-template': '/v2/queues/{queue_name}/claims/{claim_id}',
            'href-vars': {
                'queue_name': 'param/queue_name',
                'claim_id': 'param/claim_id',
            },
            'hints': {
                'allow': ['GET'],
                'formats': {
                    'application/json': {},
                },
            },
        },
        'rel/post_claim': {
            'href-template': '/v2/queues/{queue_name}/claims{?limit}',
            'href-vars': {
                'queue_name': 'param/queue_name',
                'limit': 'param/claim_limit',
            },
            'hints': {
                'allow': ['POST'],
                'formats': {
                    'application/json': {},
                },
                'accept-post': ['application/json']
            },
        },
        'rel/patch_claim': {
            'href-template': '/v2/queues/{queue_name}/claims/{claim_id}',
            'href-vars': {
                'queue_name': 'param/queue_name',
                'claim_id': 'param/claim_id',
            },
            'hints': {
                'allow': ['PATCH'],
                'formats': {
                    'application/json': {},
                },
                'accept-post': ['application/json']
            },
        },
        'rel/delete_claim': {
            'href-template': '/v2/queues/{queue_name}/claims/{claim_id}',
            'href-vars': {
                'queue_name': 'param/queue_name',
                'claim_id': 'param/claim_id',
            },
            'hints': {
                'allow': ['DELETE'],
                'formats': {
                    'application/json': {},
                },
            },
        },
    }
}
# Extra resources advertised only in admin deployments; Resource.__init__
# merges these into JSON_HOME['resources'] when conf.admin_mode is set.
# Same structure as JSON_HOME entries: URI template, variables, and hints.
ADMIN_RESOURCES = {
    # -----------------------------------------------------------------
    # Pools
    # -----------------------------------------------------------------
    'rel/pools': {
        'href-template': '/v2/pools{?detailed,limit,marker}',
        'href-vars': {
            'detailed': 'param/detailed',
            'limit': 'param/pool_limit',
            'marker': 'param/marker',
        },
        'hints': {
            'allow': ['GET'],
            'formats': {
                'application/json': {},
            },
        },
    },
    'rel/pool': {
        'href-template': '/v2/pools/{pool_name}',
        'href-vars': {
            'pool_name': 'param/pool_name',
        },
        'hints': {
            'allow': ['GET', 'PUT', 'PATCH', 'DELETE'],
            'formats': {
                'application/json': {},
            },
        },
    },

    # -----------------------------------------------------------------
    # Flavors
    # -----------------------------------------------------------------
    'rel/flavors': {
        'href-template': '/v2/flavors{?detailed,limit,marker}',
        'href-vars': {
            'detailed': 'param/detailed',
            'limit': 'param/flavor_limit',
            'marker': 'param/marker',
        },
        'hints': {
            'allow': ['GET'],
            'formats': {
                'application/json': {},
            },
        },
    },
    'rel/flavor': {
        'href-template': '/v2/flavors/{flavor_name}',
        'href-vars': {
            'flavor_name': 'param/flavor_name',
        },
        'hints': {
            'allow': ['GET', 'PUT', 'PATCH', 'DELETE'],
            'formats': {
                'application/json': {},
            },
        },
    },

    # -----------------------------------------------------------------
    # Health
    # -----------------------------------------------------------------
    'rel/health': {
        'href': '/v2/health',
        'hints': {
            'allow': ['GET'],
            'formats': {
                'application/json': {},
            },
        },
    },
}
class Resource(object):
    """Responder that serves the v2 API's JSON home document.

    The document is rendered once at construction time and cached as
    UTF-8 bytes, so GET requests are served without re-serializing.
    """

    def __init__(self, conf):
        # NOTE(review): this mutates the module-level JSON_HOME in place,
        # so once an admin-mode instance has been constructed, every later
        # instance in the same process also advertises the admin resources.
        if conf.admin_mode:
            JSON_HOME['resources'].update(ADMIN_RESOURCES)

        # Pre-render the home document; it never changes after this point.
        rendered = json.dumps(JSON_HOME, ensure_ascii=False, indent=4)
        self.document_utf8 = rendered.encode('utf-8')

    def on_get(self, req, resp, project_id):
        """Return the cached home document.

        Status is left at the framework default (200).
        """
        resp.data = self.document_utf8
        resp.content_type = 'application/json-home'
        resp.cache_control = ['max-age=86400']
'formats': { 'application/json': {}, }, }, }, #", "'param/queue_name', 'ids': 'param/ids', 'pop': 'param/pop' }, 'hints': { 'allow': [", "# ----------------------------------------------------------------- 'rel/pools': { 'href-template': '/v2/pools{?detailed,limit,marker}', 'href-vars': { 'detailed': 'param/detailed',", "'claim_id': 'param/claim_id', }, 'hints': { 'allow': ['DELETE'], 'formats': { 'application/json':", "{} } } }, # ----------------------------------------------------------------- # Claims # -----------------------------------------------------------------", "the License. You may obtain a copy # of the", "applicable law or agreed to in writing, software # distributed", "# use this file except in compliance with the License.", "} } }, 'rel/message_delete': { 'href-template': '/v2/queues/{queue_name}/messages/{message_id}{?claim}', # noqa 'href-vars':", "} class Resource(object): def __init__(self, conf): if conf.admin_mode: JSON_HOME['resources'].update(ADMIN_RESOURCES) document", "{ 'href-template': '/v2/queues/{queue_name}/claims/{claim_id}', 'href-vars': { 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id', },", "'param/detailed', }, 'hints': { 'allow': ['GET'], 'formats': { 'application/json': {},", "'formats': { 'application/json': {}, }, 'accept-post': ['application/json'], }, }, 'rel/messages_delete':", "'rel/messages': { 'href-template': ('/v2/queues/{queue_name}/messages' '{?marker,limit,echo,include_claimed}'), 'href-vars': { 'queue_name': 'param/queue_name', 'marker':", "'allow': ['GET'], 'formats': { 'application/json': {}, }, }, }, 'rel/post_messages':", "}, 'hints': { 'allow': [ 'DELETE' ], 'formats': { 'application/json':", "# NOTE(kgriffs): http://tools.ietf.org/html/draft-nottingham-json-home-03 JSON_HOME = { 'resources': { # -----------------------------------------------------------------", "OF ANY KIND, either express or implied. 
See the #", "'rel/claim': { 'href-template': '/v2/queues/{queue_name}/claims/{claim_id}', 'href-vars': { 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id',", "----------------------------------------------------------------- 'rel/pools': { 'href-template': '/v2/pools{?detailed,limit,marker}', 'href-vars': { 'detailed': 'param/detailed', 'limit':", "{ 'queue_name': 'param/queue_name', 'message_id': 'param/message_id', 'claim': 'param/claim_id' }, 'hints': {", "License. import json # NOTE(kgriffs): http://tools.ietf.org/html/draft-nottingham-json-home-03 JSON_HOME = { 'resources':", "'param/queue_name', 'marker': 'param/marker', 'limit': 'param/messages_limit', 'echo': 'param/echo', 'include_claimed': 'param/include_claimed', },", "{ 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id', }, 'hints': { 'allow': ['GET'],", "}, }, # ----------------------------------------------------------------- # Health # ----------------------------------------------------------------- 'rel/health': {", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "in writing, software # distributed under the License is distributed", "['application/json'] }, }, 'rel/delete_claim': { 'href-template': '/v2/queues/{queue_name}/claims/{claim_id}', 'href-vars': { 'queue_name':", "'message_id': 'param/message_id', 'claim': 'param/claim_id' }, 'hints': { 'allow': [ 'DELETE'", "'marker': 'param/marker', }, 'hints': { 'allow': ['GET'], 'formats': { 'application/json':", "}, }, 'rel/pool': { 'href-template': '/v2/pools/{pool_name}', 'href-vars': { 'pool_name': 'param/pool_name',", "'rel/messages_delete': { 'href-template': '/v2/queues/{queue_name}/messages{?ids,pop}', 'href-vars': { 'queue_name': 'param/queue_name', 'ids': 'param/ids',", "'/v2/health', 'hints': { 'allow': ['GET'], 'formats': { 'application/json': {}, },", "'param/queue_name', 'limit': 'param/claim_limit', }, 'hints': { 'allow': ['POST'], 'formats': {", "class Resource(object): def __init__(self, 
conf): if conf.admin_mode: JSON_HOME['resources'].update(ADMIN_RESOURCES) document =", "}, }, }, # ----------------------------------------------------------------- # Health # ----------------------------------------------------------------- 'rel/health':", "2.0 (the \"License\"); you may not # use this file", "{}, }, }, }, 'rel/queue': { 'href-template': '/v2/queues/{queue_name}', 'href-vars': {", "}, }, }, 'rel/queue': { 'href-template': '/v2/queues/{queue_name}', 'href-vars': { 'queue_name':", "# Flavors # ----------------------------------------------------------------- 'rel/flavors': { 'href-template': '/v2/flavors{?detailed,limit,marker}', 'href-vars': {", "{ 'allow': ['GET', 'PUT', 'PATCH', 'DELETE'], 'formats': { 'application/json': {},", "{ 'application/json': {}, }, }, }, } class Resource(object): def", "{}, }, }, }, 'rel/post_claim': { 'href-template': '/v2/queues/{queue_name}/claims{?limit}', 'href-vars': {", "'href-template': ('/v2/queues/{queue_name}/messages' '{?marker,limit,echo,include_claimed}'), 'href-vars': { 'queue_name': 'param/queue_name', 'marker': 'param/marker', 'limit':", "# Copyright (c) 2013 Rackspace, Inc. # # Licensed under", "'param/queue_name', 'claim_id': 'param/claim_id', }, 'hints': { 'allow': ['GET'], 'formats': {", "License, Version 2.0 (the \"License\"); you may not # use", "compliance with the License. You may obtain a copy #", "'formats': { 'application/json': {} } } }, # ----------------------------------------------------------------- #", "under # the License. import json # NOTE(kgriffs): http://tools.ietf.org/html/draft-nottingham-json-home-03 JSON_HOME", "either express or implied. 
See the # License for the", "{ 'href-template': '/v2/pools/{pool_name}', 'href-vars': { 'pool_name': 'param/pool_name', }, 'hints': {", "# License for the specific language governing permissions and limitations", "the specific language governing permissions and limitations under # the", "{ 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id', }, 'hints': { 'allow': ['PATCH'],", "project_id): resp.data = self.document_utf8 resp.content_type = 'application/json-home' resp.cache_control = ['max-age=86400']", "{ 'allow': ['GET'], 'formats': { 'application/json': {}, }, }, },", "'param/message_id', 'claim': 'param/claim_id' }, 'hints': { 'allow': [ 'DELETE' ],", "['DELETE'], 'formats': { 'application/json': {}, }, }, }, } }", "{ 'application/json': {}, }, 'accept-post': ['application/json'] }, }, 'rel/patch_claim': {", "{ 'href-template': '/v2/queues/{queue_name}/claims{?limit}', 'href-vars': { 'queue_name': 'param/queue_name', 'limit': 'param/claim_limit', },", "2013 Rackspace, Inc. 
# # Licensed under the Apache License,", "{ 'application/json': {}, }, }, }, 'rel/pool': { 'href-template': '/v2/pools/{pool_name}',", "{ 'application/json': {}, }, }, }, 'rel/post_messages': { 'href-template': '/v2/queues/{queue_name}/messages',", "'application/json': {}, }, }, }, } } ADMIN_RESOURCES = {", "'param/marker', 'limit': 'param/queue_limit', 'detailed': 'param/detailed', }, 'hints': { 'allow': ['GET'],", "'DELETE' ], 'formats': { 'application/json': {} } } }, 'rel/message_delete':", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "{ # ----------------------------------------------------------------- # Pools # ----------------------------------------------------------------- 'rel/pools': { 'href-template':", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "'hints': { 'allow': ['GET', 'PUT', 'PATCH', 'DELETE'], 'formats': { 'application/json':", "'param/pool_limit', 'marker': 'param/marker', }, 'hints': { 'allow': ['GET'], 'formats': {", "{ 'application/json': {}, }, }, }, 'rel/queue': { 'href-template': '/v2/queues/{queue_name}',", "'rel/post_messages': { 'href-template': '/v2/queues/{queue_name}/messages', 'href-vars': { 'queue_name': 'param/queue_name', }, 'hints':", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "You may obtain a copy # of the License at", "'rel/patch_claim': { 'href-template': '/v2/queues/{queue_name}/claims/{claim_id}', 'href-vars': { 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id',", "= { # ----------------------------------------------------------------- # Pools # ----------------------------------------------------------------- 'rel/pools': {", "----------------------------------------------------------------- # Queues # ----------------------------------------------------------------- 'rel/queues': { 'href-template': '/v2/queues{?marker,limit,detailed}', 'href-vars':", "'application/json': {}, }, 'accept-post': ['application/json'], }, }, 
'rel/messages_delete': { 'href-template':", "self.document_utf8 = document.encode('utf-8') def on_get(self, req, resp, project_id): resp.data =", "'/v2/flavors{?detailed,limit,marker}', 'href-vars': { 'detailed': 'param/detailed', 'limit': 'param/flavor_limit', 'marker': 'param/marker', },", "# ----------------------------------------------------------------- # Flavors # ----------------------------------------------------------------- 'rel/flavors': { 'href-template': '/v2/flavors{?detailed,limit,marker}',", "}, }, 'rel/queue_stats': { 'href-template': '/v2/queues/{queue_name}/stats', 'href-vars': { 'queue_name': 'param/queue_name',", "'marker': 'param/marker', 'limit': 'param/queue_limit', 'detailed': 'param/detailed', }, 'hints': { 'allow':", "'param/queue_name', }, 'hints': { 'allow': ['GET'], 'formats': { 'application/json': {},", "self.document_utf8 resp.content_type = 'application/json-home' resp.cache_control = ['max-age=86400'] # status defaults", "'href-vars': { 'detailed': 'param/detailed', 'limit': 'param/flavor_limit', 'marker': 'param/marker', }, 'hints':", "'param/flavor_name', }, 'hints': { 'allow': ['GET', 'PUT', 'PATCH', 'DELETE'], 'formats':", "{ 'application/json': {} } } }, 'rel/message_delete': { 'href-template': '/v2/queues/{queue_name}/messages/{message_id}{?claim}',", "'hints': { 'allow': ['DELETE'], 'formats': { 'application/json': {}, }, },", "['GET'], 'formats': { 'application/json': {}, }, }, }, } class", "'ids': 'param/ids', 'pop': 'param/pop' }, 'hints': { 'allow': [ 'DELETE'", "}, }, # ----------------------------------------------------------------- # Flavors # ----------------------------------------------------------------- 'rel/flavors': {", "----------------------------------------------------------------- # Flavors # ----------------------------------------------------------------- 'rel/flavors': { 'href-template': '/v2/flavors{?detailed,limit,marker}', 'href-vars':", "'param/pop' }, 'hints': { 'allow': [ 'DELETE' ], 'formats': 
{", "}, }, }, 'rel/post_claim': { 'href-template': '/v2/queues/{queue_name}/claims{?limit}', 'href-vars': { 'queue_name':", "'PATCH', 'DELETE'], 'formats': { 'application/json': {}, }, }, }, #", "----------------------------------------------------------------- # Health # ----------------------------------------------------------------- 'rel/health': { 'href': '/v2/health', 'hints':", "ANY KIND, either express or implied. See the # License", "'hints': { 'allow': ['POST'], 'formats': { 'application/json': {}, }, 'accept-post':", "# distributed under the License is distributed on an \"AS", "'application/json': {}, }, }, }, 'rel/post_messages': { 'href-template': '/v2/queues/{queue_name}/messages', 'href-vars':", "# Unless required by applicable law or agreed to in", "{ 'queue_name': 'param/queue_name', 'claim_id': 'param/claim_id', }, 'hints': { 'allow': ['DELETE'],", "'href-vars': { 'queue_name': 'param/queue_name', 'marker': 'param/marker', 'limit': 'param/messages_limit', 'echo': 'param/echo',", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "[ 'DELETE' ], 'formats': { 'application/json': {} } } },", "conf.admin_mode: JSON_HOME['resources'].update(ADMIN_RESOURCES) document = json.dumps(JSON_HOME, ensure_ascii=False, indent=4) self.document_utf8 = document.encode('utf-8')", "'param/detailed', 'limit': 'param/pool_limit', 'marker': 'param/marker', }, 'hints': { 'allow': ['GET'],", "'formats': { 'application/json': {}, }, }, }, 'rel/post_messages': { 'href-template':", "{ 'queue_name': 'param/queue_name', 'marker': 'param/marker', 'limit': 'param/messages_limit', 'echo': 'param/echo', 'include_claimed':", "{ 'href-template': '/v2/queues/{queue_name}/messages{?ids,pop}', 'href-vars': { 'queue_name': 'param/queue_name', 'ids': 'param/ids', 'pop':", "{ 'allow': ['DELETE'], 'formats': { 'application/json': {}, }, }, },", "{}, }, }, }, # ----------------------------------------------------------------- # Health # 
-----------------------------------------------------------------", "'/v2/pools{?detailed,limit,marker}', 'href-vars': { 'detailed': 'param/detailed', 'limit': 'param/pool_limit', 'marker': 'param/marker', },", "}, }, 'rel/queue': { 'href-template': '/v2/queues/{queue_name}', 'href-vars': { 'queue_name': 'param/queue_name',", "'hints': { 'allow': ['PATCH'], 'formats': { 'application/json': {}, }, 'accept-post':", "'param/queue_name', 'message_id': 'param/message_id', 'claim': 'param/claim_id' }, 'hints': { 'allow': [", "{ 'allow': ['POST'], 'formats': { 'application/json': {}, }, 'accept-post': ['application/json']", "'queue_name': 'param/queue_name', }, 'hints': { 'allow': ['GET'], 'formats': { 'application/json':", "{ 'href': '/v2/health', 'hints': { 'allow': ['GET'], 'formats': { 'application/json':", "'application/json': {}, }, }, }, 'rel/flavor': { 'href-template': '/v2/flavors/{flavor_name}', 'href-vars':", "{ 'application/json': {}, }, }, }, # ----------------------------------------------------------------- # Flavors", "json.dumps(JSON_HOME, ensure_ascii=False, indent=4) self.document_utf8 = document.encode('utf-8') def on_get(self, req, resp,", "}, 'rel/message_delete': { 'href-template': '/v2/queues/{queue_name}/messages/{message_id}{?claim}', # noqa 'href-vars': { 'queue_name':", "(the \"License\"); you may not # use this file except", "or implied. See the # License for the specific language", "{ 'application/json': {}, }, }, }, } } ADMIN_RESOURCES =" ]
[ "containing code that created the new file. May or may", "bitwise concatenation of the major, minor and patch levels. Prerelease", "triggered the signature hit.', }), ('sig', ('it:av:sig', {}), { 'ro':", "('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent'))}), { 'doc': 'The", "('reg', ('it:dev:regval', {}), { 'doc': 'The registry key or value", "= s_version.packVersion(major, minor, patch) subs = {'major': major, 'minor': minor,", "'A log level integer that increases with severity.', }), ('data',", "{ 'doc': 'URL where a specific version of the software", "('desc', ('str', {}), { 'doc': 'A brief description of the", "of the client during the URL retrieval..' }), ('client:port', ('inet:port',", "mesg='No text left after stripping whitespace') subs = s_version.parseSemver(valu) if", "('str', {}), { 'doc': 'A free-form description of the signature.',", "serving a url.', }), ('url', ('inet:url', {}), { 'ro': True,", "( ('host', ('it:host', {}), { 'doc': 'The host that executed", "{}), { 'doc': 'The process that matched the YARA rule.'}),", "POSIX account's default shell.\", 'ex': '/bin/bash', }), ('windows:sid', ('it:os:windows:sid', {}),", "{ 'doc': 'The (optional) clear text password for this password", "{'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc': 'A Mitre ATT&CK Technique ID.', 'ex':", "the network.', }), ('net4', ('inet:net4', {}), { 'doc': 'The optional", "the org or person who authored the software.', }), ('author:org',", "description of the ATT&CK technique.', 'disp': {'hint': 'text'}, }), ('url',", "'doc': 'A YARA rule match to a file.', }), ('it:app:yara:procmatch',", "thread.', }), ('src:proc', ('it:exec:proc', {}), { 'doc': 'An external process", "signature name.', }), ('sig:soft', ('it:prod:soft', {}), { 'ro': True, 'doc':", "containing the file.', }), ('path', ('file:path', {}), { 'doc': 'The", "which the activity occurred.'}), ('time', ('time', {}), { 'doc': 'The", "that requested the URL.', }), ('host', ('it:host', 
{}), { 'doc':", "'doc': 'The exit code or return value for the thread.',", "{ 'doc': 'Observed/variant names for this software.', }), ('desc', ('str',", "('str', {'lower': True})))}), { 'doc': 'A signature name within the", ": version : update : edition : language : sw_edition", ": vendor : product : version : update : edition", "match to a file.', }), ('it:app:yara:procmatch', ('guid', {}), { 'doc':", "of a host creating or setting a registry key.', }),", "on the host.', }), )), ('it:dev:str', {}, ( ('norm', ('str',", "increases with severity.', }), ('data', ('data', {}), { 'doc': 'A", "'The primary name for the ATT&CK group.', }), ('names', ('array',", "}), )), ('it:exec:mmap', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "originated.', }), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 where", "'The most recent version of the rule evaluated as a", "{'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent'))}), { 'doc': 'The given", "True}), { 'ro': True, 'doc': 'The \"language\" field from the", "permission string.'}), ('it:os:android:intent', ('str', {}), { 'doc': 'An android intent", "caused the hit.'}), ('src:ipv4', ('inet:ipv4', {}), { 'doc': 'The source", "hash value.', }), ('hash:sha512', ('hash:sha512', {}), { 'doc': 'The SHA512", "valu): if valu < 0: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm", "'ro': True, 'doc': 'The \"language\" field from the CPE 2.3", "MD5 password hash value.', }), ('hash:sha1', ('hash:sha1', {}), { 'doc':", "True, 'doc': 'The android intent which is broadcast by the", "an unsuccessful logon attempt.', }), ('logoff:time', ('time', {}), { 'doc':", "description of the host.', }), ('domain', ('it:domain', {}), { 'doc':", "return valu, subs async def _onFormItDevStr(self, node): await node.set('norm', node.ndef[1])", "that read the registry.', }), ('host', ('it:host', {}), { 'doc':", "}), )), ('it:exec:url', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "self.setNormFunc(int, 
self._normPyInt) def _normPyStr(self, valu): valu = valu.strip() if not", "executing on a host. May be an actual (e.g., endpoint)", "{ 'doc': 'The IPv6 where the logon originated.', }), )),", "'doc': 'A SHA256 hash of the memory map. Bytes may", "port. Typically the same host referenced in :proc, if present.',", "the YARA rule.'}), ('version', ('it:semver', {}), { 'doc': 'The most", "deleted.', }), ('path', ('file:path', {}), { 'doc': 'The path where", "{'type': 'it:mitre:attack:tactic', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc':", "host.', }), ('serial', ('str', {}), { 'doc': 'The serial number", "{ 'doc': 'The URL that documents the ATT&CK mitigation.', }),", "for the process. For example, rundll32.exe may be considered the", "r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc': 'A Mitre ATT&CK Technique ID.', 'ex': 'T1548',", "'doc': 'A mutex created by a process at runtime.', }),", "first codeblock of the function.'}), ('rank', ('int', {}), { 'doc':", "{}), { 'doc': 'A reference URL for information about the", "present.', }), ('exe', ('file:bytes', {}), { 'doc': 'The specific file", "of the ATT&CK technique.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "}), ('it:cmd', ('str', {'strip': True}), { 'doc': 'A unique command-line", "version string.', }), ('arch', ('it:dev:str', {}), { 'doc': 'Software architecture.',", "\"any\" - = N/A ''' def __init__(self, modl, name, info,", "'doc': 'The time the data from the registry was deleted.',", "function rank score used to evaluate if it exhibits interesting", "True, 'split': ','}), { 'doc': 'An array of techniques used", "'doc': 'Host with the software.'}), ('softver', ('it:prod:softver', {}), {'ro': True,", "host running the process that created the new file. Typically", "of URLs that document the ATT&CK software.', }), ('techniques', ('array',", "array of ATT&CK tactics that include this technique.', }), )),", "to bind().' 
}), ('server:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6", "{}), { 'doc': 'The parent ATT&CK technique on this sub-technique.',", "('rank', ('int', {}), { 'doc': 'The function rank score used", "'it:reveng:function'))}), { 'doc': 'An instance of a function in an", "that caused the hit.'}), ('dst:ipv6', ('inet:ipv6', {}), { 'doc': 'The", "('it:exec:reg:set', ('guid', {}), { 'doc': 'An instance of a host", "host running the process that wrote to the file. Typically", "caused the hit.'}), ('time', ('time', {}), { 'doc': 'The time", "('it:adid', ('str', {'lower': True, 'strip': True}), { 'doc': 'An advertising", "}), ('time', ('time', {}), { 'doc': 'The time the registry", "the value of the registry key, if the value is", "{ 'doc': 'The process ID.', }), ('time', ('time', {}), {", "the thread was created.', }), ('exited', ('time', {}), { 'doc':", "{}), { 'doc': 'The place where the host resides.', }),", "Tactic ID.', 'ex': 'TA0040', }), ('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), {", "{ 'doc': 'The IPv4 where the logon originated.', }), ('client:ipv6',", "process.', }), ('it:exec:loadlib', ('guid', {}), { 'doc': 'A library load", "process executing code that wrote to / modified the existing", "{}), { 'doc': 'A YARA rule unique identifier.', }), ('it:app:yara:match',", "software.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True,", "}), ('posix:home', ('file:path', {}), { 'doc': \"The path to the", "'The place where the host resides.', }), ('loc', ('loc', {}),", "of the YARA rule.'}), ('author', ('ps:contact', {}), { 'doc': 'Contact", "'The start time for the process.', }), ('exited', ('time', {}),", "in subs.items(): await node.set(f'semver:{k}', v) except asyncio.CancelledError: # pragma: no", "('exe', ('file:bytes', {}), { 'doc': 'The specific file containing code", "instance of a host creating or setting a registry key.',", "time the memory map was deleted.', }), ('path', 
('file:path', {}),", "match.'}), )), ('it:app:yara:rule', {}, ( ('text', ('str', {}), { 'doc':", "library.'}), ('lib', ('it:prod:softver', {}), {'ro': True, 'doc': 'The library software", "{ 'doc': 'The host containing the file.', }), ('path', ('file:path',", "('guid', {}), { 'doc': 'An instance of a host binding", "to a filesystem.', }), ('it:exec:file:del', ('guid', {}), { 'doc': 'An", "{}), { 'doc': 'The specific file containing code that wrote", "the value is a string.', }), ('int', ('it:dev:int', {}), {", "}), ('posix:gid', ('int', {}), { 'doc': 'The primary group ID", "True}), { 'doc': 'An advertising identification string.'}), ('it:os:windows:sid', ('str', {'regex':", "thread.', }), ('src:thread', ('it:exec:thread', {}), { 'doc': 'The thread which", "subs is None: raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable to brute force", "the ATT&CK group.', }), ('tag', ('syn:tag', {}), { 'doc': 'The", "True}), { 'doc': 'A description of the ATT&CK technique.', 'disp':", "parts.append(part) return parts def _normPyStr(self, valu): if not valu.startswith('cpe:2.3:'): mesg", "Overflow)', }), ('desc', ('str', {}), { 'doc': 'The CWE description", "technique.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse tag", "('it:app:snort:hit', ('guid', {}), { 'doc': 'An instance of a snort", "('it:prod:softver', {}), {'ro': True, 'doc': 'Software on the host.'}) )),", "host running the process that read the file. 
Typically the", "android advertising identification string.'}), ('it:os:android:perm', ('str', {}), { 'doc': 'An", "}), ('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK", "'' continue part += c except StopIteration: parts.append(part) return parts", "raise except Exception: logger.exception('Failed to brute force version string [%s]',", "to a synapse ou:org.', }), ('name', ('ou:name', {}), { 'doc':", "'A Mitre ATT&CK element status.', 'ex': 'current', }), ('it:mitre:attack:group', ('str',", "}), ('it:fs:file', ('guid', {}), { 'doc': 'A file on a", "('file', ('file:bytes', {}), { 'doc': 'The file on the host.',", "is mapped with execute permissions.', }), ('created', ('time', {}), {", "('posix:gid', ('int', {}), { 'doc': 'The primary group ID of", "an individual logon/logoff event.' }), ('it:hosturl', ('comp', {'fields': (('host', 'it:host'),", "the signature.', }) )), ('it:av:filehit', {}, ( ('file', ('file:bytes', {}),", ")), ('it:hostsoft', {}, ( ('host', ('it:host', {}), {'ro': True, 'doc':", "or return value for the thread.', }), ('src:proc', ('it:exec:proc', {}),", "file.'}), )), ('it:hostsoft', {}, ( ('host', ('it:host', {}), {'ro': True,", "address of the flow that caused the hit.'}), ('src:ipv6', ('inet:ipv6',", "{}), { 'doc': 'The destination IPv4 address of the flow", "the snort rule.'}), ('src', ('inet:addr', {}), { 'doc': 'The source", "mapped with execute permissions.', }), ('created', ('time', {}), { 'doc':", "{ 'doc': 'The sensor host node that produced the hit.'}),", "the software.', 'disp': {'hint': 'text'}, }), ('desc:short', ('str', {'lower': True}),", "'disp': {'hint': 'text'}, }), ('pid', ('int', {}), { 'doc': 'The", "if the mmap is mapped with read permissions.', }), ('perms:write',", "}), ('it:exec:thread', ('guid', {}), { 'doc': 'A thread executing in", "app software which listens for the android intent.'}), ('intent', ('it:os:android:intent',", "'doc': 'The process ID.', }), ('time', ('time', {}), { 
'doc':", "()), ), } name = 'it' return ((name, modl), )", "('cves', ('array', {'type': 'it:sec:cve', 'uniq': True, 'sorted': True}), { 'doc':", "array of ATT&CK technique IDs used by the group.', }),", "pair.', }), ('it:prod:soft', ('guid', {}), { 'doc': 'A arbitrary, unversioned", "'doc': 'The name of the function.'}), ('description', ('str', {}), {", "{}), { 'doc': 'The name of the function.'}), ('description', ('str',", "('intent', ('it:os:android:intent', {}), {'ro': True, 'doc': 'The android intent which", "('host', ('it:host', {}), { 'doc': 'The host that executed the", "time the data from the registry was deleted.', }), ('reg',", "'The file on the host.', }), ('ctime', ('time', {}), {", "'doc': 'The optional contiguous IPv4 address range of this network.',", "was read.', }), ('path', ('file:path', {}), { 'doc': 'The path", "{ 'doc': 'The name of the snort rule.'}), ('version', ('it:semver',", "}), ('exited', ('time', {}), { 'doc': 'The time the process", "'A short description of the software.', }), ('cpe', ('it:sec:cpe', {}),", "not be the same :exe referenced in :proc, if present.',", "('name', ('str', {}), { 'doc': 'The name of the snort", "'doc': 'A description of the software.', 'disp': {'hint': 'text'}, }),", "{}), { 'doc': 'If deprecated, this field may contain the", "{'strip': True}), { 'doc': 'A unique command-line string.', 'ex': 'foo.exe", "True}), { 'doc': 'An array of URLs that document the", "'doc': 'The base memory address where the map was created", "('url', ('inet:url', {}), { 'doc': 'The URL that was requested.',", "the named pipe. 
Typically the same host referenced in :proc,", "'doc': 'The name of a host or system.', }), ('it:host',", "{ 'doc': 'A arbitrary, unversioned software product.', }), ('it:adid', ('str',", "('it:os:windows:sid', ('str', {'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc': 'A Microsoft Windows Security", "this group.', }), ('posix:gid', ('int', {}), { 'doc': 'The primary", "the logon originated.', }), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The", "node that produced the hit.'}), ('version', ('it:semver', {}), { 'doc':", "13: mesg = f'CPE 2.3 string has {len(parts)} parts, expected", "'The main process executing code that deleted the file.', }),", "{ 'doc': 'A free-form description of the CVE vulnerability.', 'disp':", "}), ('time', ('time', {}), { 'doc': 'The time the data", "{ 'doc': 'The user name of the process owner.', }),", "{}), { 'doc': 'The inet:addr of the server when binding", "('time', ('time', {}), { 'doc': 'The time the named pipe", "integer value representing the semver is the bitwise concatenation of", "build information will be parsed out and available as strings", "node.set('norm', node.ndef[1]) async def _onFormMakeDevStr(self, node): pprop = node.ndef[1] await", "properly or bruteforce parts try: valu, subs = self.bruteVersionStr(prop) await", "of the account.', 'ex': '1001', }), ('windows:sid', ('it:os:windows:sid', {}), {", "'it:sec:cve', 'uniq': True, 'sorted': True}), { 'doc': 'A list of", "'doc': 'The virtual address of the first codeblock of the", "by the group.', }), )), ('it:mitre:attack:tactic', {}, ( ('name', ('str',", "the file was created.', }), ('path:dir', ('file:path', {}), { 'ro':", "{}), { 'doc': 'The value of the registry key, if", "string.'}), ('it:os:android:perm', ('str', {}), { 'doc': 'An android permission string.'}),", "if not prop: return await node.set('vers:norm', prop) # Make it:dev:str", "def _onPropSoftverSoft(self, node, oldv): # Check to see if name", "the software at a 
particular version.', }), ('names', ('array', {'type':", "'A named pipe created by a process at runtime.', }),", "'The (optional) clear text password for this password hash.', }),", "to annotate nodes included in this ATT&CK mitigation.', 'ex': 'cno.mitre.m0100',", "'The last known location for the host.' }), ('place', ('geo:place',", "{ 'ro': True, 'doc': 'The guid matching the function.'}), ('string',", "the ATT&CK software.', }), ('tag', ('syn:tag', {}), { 'doc': 'The", "{ 'doc': 'The snort rule that matched the file.'}), ('flow',", "the logon occured.', }), ('success', ('bool', {}), { 'doc': 'Set", "registry.', }), ('host', ('it:host', {}), { 'doc': 'The host running", "'file:bytes'), ('sig', 'it:av:sig'))}), { 'doc': 'A file that triggered an", "is broadcast by the app.'}), )), ('it:prod:softver', {}, ( ('software',", "process that created the mutex. Typically the same host referenced", "of the process.', }), ('src:exe', ('file:path', {}), { 'doc': 'The", "string is expected to start with \"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu, mesg=mesg)", "'Buffer Copy without Checking Size of Input (Classic Buffer Overflow)',", "{}), { 'doc': 'The file considered the \"main\" executable for", "{}, ( ('user', ('inet:user', {}), { 'doc': 'The username associated", "contains the signature.', }), )), ('it:av:prochit', {}, ( ('proc', ('it:exec:proc',", "including any command line parameters.', 'disp': {'hint': 'text'}, }), ('pid',", "('it:av:prochit', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The file", "of the ATT&CK group.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "rule text.', 'disp': {'hint': 'text'}, }), ('name', ('str', {}), {", "host reading a file from a filesystem.', }), ('it:exec:file:write', ('guid',", "of the YARA rule.'}), ('version', ('it:semver', {}), { 'doc': 'The", "= { 'ctors': ( ('it:semver', 'synapse.models.infotech.SemVer', {}, { 'doc': 'Semantic", "'ex': 'S0154', }), ('it:dev:str', ('str', {}), { 'doc': 'A developer-selected", 
"shell.\", 'ex': '/bin/bash', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The", "the named pipe.', }), ('host', ('it:host', {}), { 'doc': 'The", "'The \"product\" field from the CPE 2.3 string.'}), ('version', ('str',", "file. Typically the same host referenced in :proc, if present.',", "evaluate if it exhibits interesting behavior.'}), ('complexity', ('int', {}), {", "'ex': 'TA0040', }), ('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc': 'A", "same host referenced in :proc, if present.', }), ('exe', ('file:bytes',", "{}), { 'doc': 'System normalized semantic version number.', }), ('semver:major',", "IPv4 address of the flow that caused the hit.'}), ('dst:port',", "()), ('it:dev:pipe', {}, ()), ('it:dev:mutex', {}, ()), ('it:dev:regkey', {}, ()),", "evaluation engines.'}), )), ('it:app:yara:match', {}, ( ('rule', ('it:app:yara:rule', {}), {", "'it:group'}), { 'doc': 'Groups that are a member of this", "= await node.snap.nodes('it:prod:soft=$soft', opts=opts) if nodes: name = nodes[0].get('name') if", "host where the account is registered.', }), ('domain', ('it:domain', {}),", "parse strings using the it:semver normalization before attempting to extract", "'The Microsoft Windows Security Identifier of the group.', }), )),", "the software.', }), ('author:acct', ('inet:web:acct', {}), { 'deprecated': True, 'doc':", "}), ('exitcode', ('int', {}), { 'doc': 'The exit code or", "('time', ('time', {}), { 'doc': 'The time the file was", "{'type': 'inet:url', 'uniq': True}), { 'doc': 'An array of URLs", "'The snort rule that matched the file.'}), ('flow', ('inet:flow', {}),", ": target_sw : target_hw : other * = \"any\" -", "string.'}), ('edition', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "URL. 
May or may not be the same :exe specified", "'doc': 'The contact information of the org or person who", "CWE to a full description.', }), ('parents', ('array', {'type': 'it:sec:cwe',", "of the client during the URL retrieval.' }), ('client:ipv4', ('inet:ipv4',", "{ 'doc': 'The process which killed this process.', }), )),", "contact information associated with this account.', }), ('host', ('it:host', {}),", "'ou:name', 'uniq': True, 'sorted': True}), { 'doc': 'An array of", "registry key, if the value is an integer.', }), ('bytes',", "('array', {'type': 'it:dev:str', 'uniq': True}), { 'doc': 'An array of", "('host', ('it:host', {}), { 'doc': 'The host that the account", "path if the mmap is a mapped view of a", "subs} def repr(self, valu): major, minor, patch = s_version.unpackVersion(valu) valu", "member of.', }), )), ('it:group', {}, ( ('name', ('str', {'lower':", "into a integer value Major, minor and patch levels are", "('groups', ('array', {'type': 'it:group'}), { 'doc': 'An array of groups", "names for the ATT&CK software.', }), ('desc', ('str', {'strip': True}),", "{}, ( ('file', ('file:bytes', {}), { 'ro': True, 'doc': 'The", "that was created.', }), )), ('it:exec:file:del', {}, ( ('proc', ('it:exec:proc',", "{}), { 'doc': 'The time the mutex was created.', }),", "size of the memory map in bytes.', }), ('perms:read', ('bool',", "('server:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 address specified to", "'A function from an imported library.', }), ), 'interfaces': (", "to True if the software is a library.'}), )), ('it:adid',", "node.get('software') if prop: opts = {'vars': {'soft': prop}} nodes =", "{ 'doc': 'The name of the group.', }), ('desc', ('str',", "{ 'doc': 'The mutex string.', }), )), ('it:exec:pipe', {}, (", "{ 'doc': 'Version major number.', }), ('semver:minor', ('int', {}), {", "('inet:ipv4', {}), { 'doc': 'The IPv4 of the client during", "}), ('time', ('time', {}), { 'doc': 'The time that the", "the process.', }), ('loaded', ('time', {}), { 
'doc': 'The time", "{}), { 'doc': 'The IPv4 where the logon originated.', }),", "{}), { 'doc': 'The client port during the URL retrieval..'", "r'^M[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Mitigation ID.', 'ex': 'M1036',", "represents a host or system.' }), ('it:log:event', ('guid', {}), {", "code that read the registry. May or may not be", "software version.'}), )), ('it:prod:softfile', {}, ( ('soft', ('it:prod:softver', {}), {'ro':", "('file', ('file:bytes', {}), { 'ro': True, 'doc': 'The file that", "'doc': 'The host where the group is registered.', }), ('domain',", "('it:dev:regval', {}), { 'doc': 'The registry key or value that", "{ 'ro': True, 'doc': 'The \"part\" field from the CPE", "or system.', }), ('it:sec:cve', ('str', {'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), {", "( ('soft', 'it:prod:softver'), ('os', 'it:prod:softver'))}), { 'doc': 'The software version", "'The name of the YARA rule.'}), ('author', ('ps:contact', {}), {", "file containing code that wrote to the file. May or", "}), )), ('it:exec:bind', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "that documents the ATT&CK technique.', }), ('tag', ('syn:tag', {}), {", "'doc': 'The GECOS field for the POSIX account.', }), ('posix:home',", "a filesystem.', }), ('it:exec:file:del', ('guid', {}), { 'doc': 'An instance", "2.3 string.'}), ('vendor', ('ou:name', {}), { 'ro': True, 'doc': 'The", "'The value of the registry key, if the value is", "'ro': True, 'doc': 'The parent directory of the file path", "to the registry. 
May or may not be the same", "contiguous IPv6 address range of this network.', }), )), ('it:account',", "parts.append(part) part = '' continue part += c except StopIteration:", "( ('host', ('it:host', {}), {'ro': True, 'doc': 'Host with the", "in an executable.', }), ('it:reveng:funcstr', ('comp', {'fields': (('function', 'it:reveng:function'), ('string',", "'doc': 'An instance of a snort rule hit.', }), ('it:reveng:function',", "'text'}, }), ('url', ('inet:url', {}), { 'doc': 'The URL that", "{ 'doc': 'The current version of the rule.'}), )), ('it:app:snort:hit',", "'doc': 'The inet:flow that matched the snort rule.'}), ('src', ('inet:addr',", "'The MD5 password hash value.', }), ('hash:sha1', ('hash:sha1', {}), {", "for when this version of the software was released.', }),", "string representing a named pipe.', }), ('it:dev:mutex', ('str', {}), {", "password hash.', }), )), ('it:cmd', {}, ()), ('it:exec:proc', {}, (", "running the process that requested the URL. Typically the same", "{}), {'ro': True, 'doc': 'Host with the software.'}), ('softver', ('it:prod:softver',", "specific file containing code that created the named pipe. 
May", "('proc', ('it:exec:proc', {}), { 'doc': 'The process that matched the", "'The final component of the file path (parsed from :path).',", "vulnerability.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", "array of ATT&CK software IDs used by the group.', }),", "brief description of the network.', }), ('org', ('ou:org', {}), {", "'strip': True}), { 'doc': 'The name of the software at", "{ 'ro': True, 'doc': 'The file extension of the file", "this ATT&CK technique.', 'ex': 'cno.mitre.t0100', }), ('references', ('array', {'type': 'inet:url',", "to / modified the existing file.', }), ('host', ('it:host', {}),", "'strip': True}), { 'ro': True, 'doc': 'The \"other\" field from", "{ 'doc': 'An array of ATT&CK technique IDs used by", ":path).', }), ('file', ('file:bytes', {}), { 'doc': 'The file that", "subs} class SemVer(s_types.Int): ''' Provides support for parsing a semantic", "present. ''' def postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr) self.setNormFunc(int, self._normPyInt) def", "{ 'doc': 'True if the mmap is mapped with execute", "'Organization which authored the software.', }), ('author:acct', ('inet:web:acct', {}), {", "{ 'doc': 'The GECOS field for the POSIX account.', }),", "this version instance.', }), ('vers:norm', ('str', {'lower': True}), { 'doc':", "def bruteVersionStr(self, valu): ''' Brute force the version out of", "{'type': 'it:group'}), { 'doc': 'Groups that are a member of", "ATT&CK technique on this sub-technique.', }), ('tactics', ('array', {'type': 'it:mitre:attack:tactic',", ")), ('it:exec:bind', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "('time', {}), { 'doc': 'The file access time.', }), ('user',", "attempts to parse strings using the it:semver normalization before attempting", "the account', }), ('contact', ('ps:contact', {}), { 'doc': 'Additional contact", "'If deprecated, this field may contain the current value for", "that bound the listening port. 
Typically the same host referenced", "int, dict: The system normalized version integer and a subs", "valu, subs except s_exc.BadTypeValu: # Try doing version part extraction", "this thread.', }), )), ('it:exec:loadlib', {}, ( ('proc', ('it:exec:proc', {}),", "{}), { 'doc': 'A GUID that represents an individual logon/logoff", "'doc': 'The inet:addr of the server when binding the port.'", "{}), { 'doc': 'The name of the YARA rule.'}), ('author',", "'The NIST CPE 2.3 string specifying this software.', }), ('author',", "subs = info.get('subs') return valu, subs except s_exc.BadTypeValu: # Try", "see if name is available and set it if possible", "the trigger.'}), ('dst:ipv4', ('inet:ipv4', {}), { 'doc': 'The destination IPv4", "file that triggered the signature hit.', }), ('sig', ('it:av:sig', {}),", "in a process.', }), ('it:cmd', ('str', {'strip': True}), { 'doc':", "StopIteration: parts.append(part) return parts def _normPyStr(self, valu): if not valu.startswith('cpe:2.3:'):", "True, 'split': ','}), { 'doc': 'An array of ATT&CK software", "}), )), ('it:exec:mutex', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "{ 'doc': 'A SHA256 hash of the memory map. Bytes", "(e.g., endpoint) or virtual (e.g., malware sandbox) host.', }), ('it:exec:thread',", "the process that created the new file. 
Typically the same", "{}, ( ('name', ('str', {'lower': True, 'strip': True, 'onespace': True}),", "('file:bytes', {}), { 'doc': 'The library file that was loaded.',", "the ATT&CK technique.', }), ('tag', ('syn:tag', {}), { 'doc': 'The", "of the flow that caused the hit.'}), ('src:port', ('inet:port', {}),", ")), ('it:exec:file:add', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "flow that caused the hit.'}), ('dst:port', ('inet:port', {}), { 'doc':", "{'lower': True}), { 'doc': 'Lower case normalized version of the", "that the account logged in to.', }), ('account', ('it:account', {}),", "the operating system.'}), ('os', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "name=self.name, mesg='Cannot norm a integer larger than 1152921504606846975 as a", "are represented as integers, with a max width of 20", "\"target_hw\" field from the CPE 2.3 string.'}), ('other', ('str', {'lower':", "\"main\" executable for DLLs loaded by that program.', }), ('cmd',", "'doc': 'A GUID that represents a host or system.' }),", "'The path for the file.', }), ('path:dir', ('file:path', {}), {", "'doc': 'An instance of a host creating or setting a", "where the logon originated.', }), ('client:ipv4', ('inet:ipv4', {}), { 'doc':", "'doc': 'The host running the process that requested the URL.", "manufacturer of the host.', }), ('model', ('str', {}), { 'doc':", "the URL. 
May or may not be the same :exe", "where the file was read.', }), ('path:dir', ('file:path', {}), {", "'The default installation path of the file.'}), )), ('it:hostsoft', {},", "which requests the permission.'}), ('perm', ('it:os:android:perm', {}), {'ro': True, 'doc':", "'synapse.models.infotech.Cpe23Str', {}, { 'doc': 'A NIST CPE 2.3 Formatted String',", "run on the operating system.'}), ('os', ('it:prod:softver', {}), {'ro': True,", "('it:dev:pipe', {}, ()), ('it:dev:mutex', {}, ()), ('it:dev:regkey', {}, ()), ('it:dev:regval',", "antivirus signature.', }), ('it:av:prochit', ('guid', {}), { 'doc': 'An instance", "on a host. May be an actual (e.g., endpoint) or", "('file:path', {}), { 'doc': 'The path to the executable of", "{}), { 'doc': 'The host where the logon originated.', }),", "'The app software which broadcasts the android intent.'}), ('intent', ('it:os:android:intent',", "logging.getLogger(__name__) class Cpe23Str(s_types.Str): ''' CPE 2.3 Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section", "('guid', {}), { 'doc': 'An instance of a host adding", "( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The software which", "given software requests the android permission.'}), ('it:os:android:ilisten', ('comp', {'fields': (", "that logged in.', }), ('creds', ('auth:creds', {}), { 'doc': 'The", "from the registry.', }), ('host', ('it:host', {}), { 'doc': 'The", "function references.'}), )), ('it:reveng:impfunc', {}, ()), ), } name =", "hit.', }), ('sig', ('it:av:sig', {}), { 'ro': True, 'doc': 'The", "host running the process that created the named pipe. Typically", "{ 'doc': 'A thread executing in a process.', }), ('it:exec:loadlib',", "the bitwise concatenation of the major, minor and patch levels.", "the ATT&CK group.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "wrote to / modified the existing file.', }), ('host', ('it:host',", "name.' 
}), ('it:av:filehit', ('comp', {'fields': (('file', 'file:bytes'), ('sig', 'it:av:sig'))}), {", "def _normPyStr(self, valu): if not valu.startswith('cpe:2.3:'): mesg = 'CPE 2.3", "free-form description of the host.', }), ('domain', ('it:domain', {}), {", "representing an individual log event.', 'interfaces': ('it:host:activity',), }), ('it:network', ('guid',", "to indicate an unsuccessful logon attempt.', }), ('logoff:time', ('time', {}),", "{'lower': True, 'strip': True}), { 'doc': 'An advertising identification string.'}),", "{}), { 'doc': 'The host running the process that deleted", "{ 'doc': 'A Microsoft Windows Security Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }),", "intent.'}), ('it:os:android:ibroadcast', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent') )}),", "primary group ID of the account.', 'ex': '1001', }), ('windows:sid',", "('int', ('it:dev:int', {}), { 'doc': 'The value of the registry", "{ 'doc': 'A brief description of the domain.', }), ('org',", "the author of the YARA rule.'}), ('version', ('it:semver', {}), {", "'A file on a host.' }), ('it:exec:file:add', ('guid', {}), {", "the app.'}), )), ('it:prod:softos', {}, ( ('soft', ('it:prod:softver', {}), {'ro':", "('inet:ipv4', {}), { 'doc': 'The IPv4 address specified to bind().'", "value for the thread.', }), ('src:proc', ('it:exec:proc', {}), { 'doc':", "host running the process that deleted the file. 
Typically the", "()), ('it:host', {}, ( ('name', ('it:hostname', {}), { 'doc': 'The", "( ('salt', ('hex', {}), { 'doc': 'The (optional) hex encoded", "('salt', ('hex', {}), { 'doc': 'The (optional) hex encoded salt", "'doc': 'A URL linking this CVE to a full description.',", "{ 'doc': 'A description of the ATT&CK mitigation.', 'disp': {'hint':", "{}), { 'doc': 'The geo-political location string for the node.',", "function.'}), ('string', ('str', {}), { 'ro': True, 'doc': 'The string", "'doc': 'A logical boundary of authentication and configuration such as", "{}), { 'doc': 'A description of the software.', 'disp': {'hint':", "'doc': 'Web account of the software author.', }), ('author:email', ('inet:email',", "{ 'doc': 'The file modification time.', }), ('atime', ('time', {}),", "key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval', ('guid', {}), { 'doc': 'A", "if the value is a string.', }), ('int', ('it:dev:int', {}),", "axon.', }), )), ('it:exec:mutex', {}, ( ('proc', ('it:exec:proc', {}), {", "the mutex. Typically the same host referenced in :proc, if", "the ATT&CK software.', }), ('desc', ('str', {'strip': True}), { 'doc':", "the hit.'}), ('dst', ('inet:addr', {}), { 'doc': 'The destination address", "file that was created.', }), )), ('it:exec:file:del', {}, ( ('proc',", "time the library was loaded.', }), ('unloaded', ('time', {}), {", "s_module import synapse.lib.version as s_version logger = logging.getLogger(__name__) class Cpe23Str(s_types.Str):", "'The source address of flow that caused the hit.'}), ('src:ipv4',", "('name', ('str', {}), { 'doc': 'The CWE description field.', 'ex':", "event.', }), )), ('it:domain', {}, ( ('name', ('str', {'lower': True,", "network.', }), ('net6', ('inet:net6', {}), { 'doc': 'The optional contiguous", "'Lower case normalized version of the it:dev:str.', }), )), ('it:sec:cve',", "the memory map. 
Bytes may optionally be present in the", "a library software version.'}), ('it:prod:softos', ('comp', {'fields': ( ('soft', 'it:prod:softver'),", "value of the registry key, if the value is a", "'1001', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The Microsoft Windows", "{'fields': (('file', 'file:bytes'), ('function', 'it:reveng:function'))}), { 'doc': 'An instance of", "'doc': 'The URL that documents the ATT&CK group.', }), ('tag',", "'The optional contiguous IPv6 address range of this network.', }),", ")), ('it:exec:file:del', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "CPE 2.3 string.'}), ('edition', ('str', {'lower': True, 'strip': True}), {", "created the mutex.', }), ('host', ('it:host', {}), { 'doc': 'The", "the host.', }), ('operator', ('ps:contact', {}), { 'doc': 'The operator", "function inside an executable.', }), ('it:reveng:filefunc', ('comp', {'fields': (('file', 'file:bytes'),", "{ 'doc': 'The exit code for the process.', }), ('user',", "'The bound (listening) TCP port.' }), )), ('it:fs:file', {}, (", "library was loaded.', }), ('unloaded', ('time', {}), { 'doc': 'The", "instance of a process triggering an alert on a specific", "('bytes', ('file:bytes', {}), { 'doc': 'The file representing the value", "'Other function calls within the scope of the function.', }),", "that represents a host or system.' 
}), ('it:log:event', ('guid', {}),", "function.', }), )), ('it:reveng:funcstr', {}, ( ('function', ('it:reveng:function', {}), {", "('perms:execute', ('bool', {}), { 'doc': 'True if the mmap is", "}), ('it:av:prochit', ('guid', {}), { 'doc': 'An instance of a", "('edition', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "minor, patch) subs = {'major': major, 'minor': minor, 'patch': patch}", "}), ('file', ('file:bytes', {}), { 'doc': 'The file that was", ")), ('it:dev:int', {}, ()), ('it:dev:pipe', {}, ()), ('it:dev:mutex', {}, ()),", "s_exc import synapse.lib.types as s_types import synapse.lib.module as s_module import", "('it:mitre:attack:technique', {}), { 'doc': 'If deprecated, this field may contain", "name of the YARA rule.'}), ('author', ('ps:contact', {}), { 'doc':", "process which killed this process.', }), )), ('it:exec:thread', {}, (", "== '\\\\': c += next(genr) if c == ':': parts.append(part)", ": edition : language : sw_edition : target_sw : target_hw", "most recent version of the rule evaluated as a match.'}),", "that bound the listening port. May or may not be", "('description', ('str', {}), { 'doc': 'Notes concerning the function.'}), ('impcalls',", "of the group.', }), )), ('it:logon', {}, ( ('time', ('time',", "('duration', ('duration', {}), { 'doc': 'The duration of the logon", "within the namespace of an antivirus engine name.' 
}), ('it:av:filehit',", "host resides.', }), ('loc', ('loc', {}), { 'doc': 'The geo-political", "app software which broadcasts the android intent.'}), ('intent', ('it:os:android:intent', {}),", "'doc': 'An external process which created the thread.', }), ('src:thread',", "{}), { 'doc': 'The NIST CPE 2.3 string specifying this", "('it:reveng:funcstr', ('comp', {'fields': (('function', 'it:reveng:function'), ('string', 'str'))}), { 'deprecated': True,", "read.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The registry key", "(10, 'debug'), (20, 'info'), (30, 'notice'), (40, 'warning'), (50, 'err'),", "2.3 string has {len(parts)} parts, expected 13.' raise s_exc.BadTypeValu(valu=valu, mesg=mesg)", "a listening port.', }), ('it:fs:file', ('guid', {}), { 'doc': 'A", "True, 'doc': 'The file distributed by the software.'}), ('path', ('file:path',", "ATT&CK group ID.', 'ex': 'cno.mitre.g0100', }), ('references', ('array', {'type': 'inet:url',", "created the process.' }), ('killedby', ('it:exec:proc', {}), { 'doc': 'The", "{ 'doc': 'The virtual address of the first codeblock of", "of the software.', 'disp': {'hint': 'text'}, }), ('desc:short', ('str', {'lower':", "the ATT&CK tactic.', }), ('desc', ('str', {}), { 'doc': 'A", "matched the file.'}), ('flow', ('inet:flow', {}), { 'doc': 'The inet:flow", "'doc': 'The path that the library was loaded from.', }),", "'An instance of a snort rule hit.', }), ('it:reveng:function', ('guid',", "memory map was created.', }), ('deleted', ('time', {}), { 'doc':", "'doc': 'An array of ATT&CK tactics that include this technique.',", "('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The app software which", "{ 'doc': 'The product model of the host.', }), ('serial',", "CPE 2.3 string.'}), ('other', ('str', {'lower': True, 'strip': True}), {", "GUID that represents a logical network.' 
}), ('it:domain', ('guid', {}),", "{ 'doc': 'An android permission string.'}), ('it:os:android:intent', ('str', {}), {", "s_exc.BadTypeValu(valu=valu, mesg=mesg) subs = { 'part': parts[2], 'vendor': parts[3], 'product':", "'doc': 'The user name of the process owner.', }), ('path',", "True}), { 'doc': 'A unique command-line string.', 'ex': 'foo.exe --dostuff", "{ 'doc': 'A named pipe created by a process at", "address for the host.' }), ('latlong', ('geo:latlong', {}), { 'doc':", "{}), { 'doc': 'A free-form description of the signature.', 'disp':", "string associated with this version instance.', }), ('vers:norm', ('str', {'lower':", "owner of the file.', }), )), ('it:exec:file:add', {}, ( ('proc',", "{ 'doc': 'An instance of a host getting a registry", "}), ('url', ('inet:url', {}), { 'doc': 'A URL linking this", "}), ('addresses', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True, 'split':", "}), ('data', ('data', {}), { 'doc': 'A raw JSON record", "'doc': 'The signature name.', }), ('sig:soft', ('it:prod:soft', {}), { 'ro':", "{ 'doc': 'Observed/variant names for this software version.', }), ('cpe',", "code that wrote to the file. 
May or may not", "process which contains the thread.', }), ('created', ('time', {}), {", "( ('norm', ('str', {'lower': True}), { 'doc': 'Lower case normalized", "}), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 of the", "'' parts = [] genr = iter(text) try: while True:", "account is registered.', }), ('posix:uid', ('int', {}), { 'doc': 'The", "('it:network', {}, ( ('name', ('str', {'lower': True, 'strip': True, 'onespace':", "node, oldv): # Check to see if name is available", "exit code or return value for the thread.', }), ('src:proc',", "was deleted.', }), )), ('it:exec:file:read', {}, ( ('proc', ('it:exec:proc', {}),", "('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The Microsoft Windows Security Identifier", "\"edition\" field from the CPE 2.3 string.'}), ('language', ('str', {'lower':", "('contact', ('ps:contact', {}), { 'doc': 'Additional contact information associated with", "{ 'doc': 'The IPv6 of the client during the URL", "{ 'ro': True, 'doc': 'Host serving a url.', }), ('url',", "{ 'doc': 'The source port of the flow that caused", "in the process.', }), ('size', ('int', {}), { 'doc': 'The", "read the registry. Typically the same host referenced in :proc,", "'doc': 'A url hosted on or served by a host", "( ('mesg', ('str', {}), { 'doc': 'The log messsage text.',", "norm a integer larger than 1152921504606846975 as a semver.') major,", "'uniq': True, 'sorted': True}), { 'doc': 'Observed/variant names for this", "info.get('subs') return valu, subs except s_exc.BadTypeValu: # Try doing version", "{ 'doc': 'An array of alternate names for the ATT&CK", "an account on a host or network.' }), ('it:group', ('guid',", "of the client during the URL retrieval..' 
}), ('client:ipv6', ('inet:ipv6',", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"other\"", "or a virtual / notional host.', }), ('exe', ('file:bytes', {}),", "Mitre ATT&CK Group ID.', 'ex': 'G0100', }), ('it:mitre:attack:tactic', ('str', {'regex':", "{}), { 'doc': 'The time the file was created.', }),", "}), ('it:exec:reg:get', ('guid', {}), { 'doc': 'An instance of a", "destination port of the flow that caused the hit.'}), ('time',", "password hash value.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc': 'The", "('it:os:android:ilisten', {}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "('comp', {'fields': ( ('app', 'it:prod:soft'), ('perm', 'it:os:android:perm'))}), { 'doc': 'The", "{}, ( ('name', ('str', {}), { 'doc': 'The CWE description", "group ID of the account.', 'ex': '1001', }), ('windows:sid', ('it:os:windows:sid',", "the thread exited.', }), ('exitcode', ('int', {}), { 'doc': 'The", "that document the ATT&CK technique.', }), ('parent', ('it:mitre:attack:technique', {}), {", "the map was created in the process.', }), ('size', ('int',", "('it:exec:file:write', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "('array', {'type': 'it:group'}), { 'doc': 'Groups that are a member", "that caused the hit.'}), ('dst', ('inet:addr', {}), { 'doc': 'The", "'doc': 'True if the mmap is mapped with execute permissions.',", ")), ('it:adid', {}, ()), ('it:os:ios:idfa', {}, ()), ('it:os:android:aaid', {}, ()),", "}), ('path', ('file:path', {}), { 'doc': 'The path to the", "{}), { 'doc': 'The path where the file was written", "'Timestamp for when this version of the software was released.',", "valu): ''' Brute force the version out of a string.", "that the library was loaded from.', }), ('file', ('file:bytes', {}),", "{'fields': (('soft', 'it:prod:soft'), ('name', ('str', {'lower': True})))}), { 'doc': 'A", "text): part = '' parts = [] genr = iter(text)", "the process that wrote to the file. 
Typically the same", "}), ('it:mitre:attack:mitigation', ('str', {'regex': r'^M[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK", "retrieval.' }), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 of", "address specified to bind().' }), ('server:ipv6', ('inet:ipv6', {}), { 'doc':", "name of the software at a particular version.', }), ('names',", "'doc': 'An array of groups that the account is a", "version information for. Notes: This first attempts to parse strings", "host that the account logged in to.', }), ('account', ('it:account',", "is registered.', }), ('groups', ('array', {'type': 'it:group'}), { 'doc': 'Groups", "None: raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable to brute force version parts", "the process that bound the listening port. Typically the same", "that document the ATT&CK mitigation.', }), ('addresses', ('array', {'type': 'it:mitre:attack:technique',", "'doc': 'An array of URLs that document the CVE ID.',", "('account', ('it:account', {}), { 'doc': 'The account that logged in.',", "patch number.', }), ('semver:pre', ('str', {}), { 'doc': 'Semver prerelease", "{}, { 'doc': 'Semantic Version type.', }), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {},", "malware sandbox) host.', }), ('it:exec:thread', ('guid', {}), { 'doc': 'A", "a registry key.', }), ('it:app:yara:rule', ('guid', {}), { 'doc': 'A", "{ 'doc': 'The bound (listening) TCP port.' 
}), )), ('it:fs:file',", "imported library.', }), ), 'interfaces': ( ('it:host:activity', { 'props': (", "description field.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "located in a process.', }), ('it:cmd', ('str', {'strip': True}), {", "'ro': True, 'doc': 'The \"product\" field from the CPE 2.3", "{ 'doc': 'A description of the ATT&CK group.', 'disp': {'hint':", "group.', }), ('desc', ('str', {}), { 'doc': 'A description of", "of a snort rule hit.', }), ('it:reveng:function', ('guid', {}), {", "nodes = await node.snap.nodes('it:prod:soft=$soft', opts=opts) if nodes: name = nodes[0].get('name')", "software.', }), ('isos', ('bool', {}), { 'doc': 'Set to True", "created.', }), ('path', ('file:path', {}), { 'doc': 'The path where", "'The base memory address where the map was created in", "an integer to allow version ordering. Prerelease information is disregarded", "string.', }), ('int', ('it:dev:int', {}), { 'doc': 'The value of", "version into a integer value Major, minor and patch levels", "('it:prod:softver', {}), {'ro': True, 'doc': 'The software which distributes the", "alert on a specific antivirus signature.' }), ('it:auth:passwdhash', ('guid', {}),", "('it:exec:file:del', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "the library was loaded from.', }), ('file', ('file:bytes', {}), {", "Checking Size of Input (Classic Buffer Overflow)', }), ('desc', ('str',", "'doc': 'The android permission requested by the app.'}), )), ('it:prod:softos',", "the host.', }), ('serial', ('str', {}), { 'doc': 'The serial", "process that bound the listening port. Typically the same host", "{}), { 'doc': 'The process where the library was loaded.',", "('status', ('it:mitre:attack:status', {}), { 'doc': 'The status of this ATT&CK", ")), ('it:app:yara:match', {}, ( ('rule', ('it:app:yara:rule', {}), { 'ro': True,", "new file. 
Typically the same host referenced in :proc, if", "{ 'ro': True, 'doc': 'The string that the function references.'}),", "where the file was created.', }), ('path:dir', ('file:path', {}), {", "if prop: await node.snap.addNode('it:dev:str', prop) async def _onPropSoftverVers(self, node, oldv):", "'it:prod:soft'), ('name', ('str', {'lower': True})))}), { 'doc': 'A signature name", "is None: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable to parse string as", "v) except asyncio.CancelledError: # pragma: no cover raise except Exception:", "for the logon.', }), ('duration', ('duration', {}), { 'doc': 'The", "{ 'doc': 'An array of ATT&CK technique IDs addressed by", "released.', }), ('semver', ('it:semver', {}), { 'doc': 'System normalized semantic", ": target_hw : other * = \"any\" - = N/A", "a process.', }), ('it:app:snort:rule', ('guid', {}), { 'doc': 'A snort", "6.2) cpe:2.3: part : vendor : product : version :", "password hash value.', }), ('hash:sha512', ('hash:sha512', {}), { 'doc': 'The", "network.' 
}), ('it:domain', ('guid', {}), { 'doc': 'A logical boundary", "('int', {}), { 'doc': 'The process ID.', }), ('time', ('time',", "{'ro': True, 'doc': 'The android intent which is broadcast by", "bar', }), ('it:exec:mutex', ('guid', {}), { 'doc': 'A mutex created", "('inet:url', {}), { 'doc': 'A reference URL for information about", "'An android advertising identification string.'}), ('it:os:android:perm', ('str', {}), { 'doc':", "host where the logon originated.', }), ('client:ipv4', ('inet:ipv4', {}), {", "{}), { 'doc': 'Semver prerelease string.', }), ('semver:build', ('str', {}),", "after stripping whitespace') subs = s_version.parseSemver(valu) if subs is None:", "'disp': {'hint': 'text'}, }), ('desc:short', ('str', {'lower': True}), { 'doc':", "'The GECOS field for the POSIX account.', }), ('posix:home', ('file:path',", "('posix:uid', ('int', {}), { 'doc': 'The user ID of the", "('author', ('ps:contact', {}), { 'doc': 'The contact information of the", "{'major': major, 'minor': minor, 'patch': patch} return valu, {'subs': subs}", "'doc': 'The serial number of the host.', }), ('operator', ('ps:contact',", "'The host that executed the process. May be an actual", "('str', {}), { 'doc': 'An android permission string.'}), ('it:os:android:intent', ('str',", "'doc': 'The file modification time.', }), ('atime', ('time', {}), {", "normalization before attempting to extract version parts out of the", "Cpe23Str(s_types.Str): ''' CPE 2.3 Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2) cpe:2.3:", "detected the signature.' 
}), )), ('it:auth:passwdhash', {}, ( ('salt', ('hex',", "synapse.lib.version as s_version logger = logging.getLogger(__name__) class Cpe23Str(s_types.Str): ''' CPE", "{}), { 'doc': 'Version string associated with this version instance.',", "a full description.', }), ('parents', ('array', {'type': 'it:sec:cwe', 'uniq': True,", "array of URLs that document the ATT&CK tactic.', }), )),", "version string [%s]', prop) def getModelDefs(self): modl = { 'ctors':", "()), ('it:os:android:reqperm', {}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc':", "password for this password hash.', }), )), ('it:cmd', {}, ()),", "or system.' }), ('it:log:event', ('guid', {}), { 'doc': 'A GUID", "a function in an executable.', }), ('it:reveng:funcstr', ('comp', {'fields': (('function',", "{}), { 'doc': 'An instance of a password hash.', }),", "{ 'doc': 'The base memory address where the library was", "{}), { 'doc': 'The destination port of the flow that", "the flow that caused the hit.'}), ('dst:port', ('inet:port', {}), {", "for. Notes: This first attempts to parse strings using the", "the URL retrieval.' }), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The", "('it:host', {}), { 'doc': 'The host where the logon originated.',", "{ 'doc': 'The host that executed the process. 
May be", "minor, patch = s_version.unpackVersion(valu) valu = s_version.packVersion(major, minor, patch) subs", "mesg='Unable to parse string as a semver.') valu = s_version.packVersion(subs.get('major'),", "{ 'doc': 'An iOS advertising identification string.'}), ('it:os:android:aaid', ('it:adid', {}),", "{ 'doc': 'Name of the software.', }), ('names', ('array', {'type':", "'A free-form description of the host.', }), ('domain', ('it:domain', {}),", "'sorted': True}), { 'doc': 'Associated names for the ATT&CK software.',", "{ 'doc': 'The address of the client during the URL", "registry key.', }), ('str', ('it:dev:str', {}), { 'doc': 'The value", "by a process at runtime.', }), ('it:exec:url', ('guid', {}), {", "'ro': True, 'doc': 'The signature name.', }), ('sig:soft', ('it:prod:soft', {}),", "{}, ()), ('it:dev:regkey', {}, ()), ('it:dev:regval', {}, ( ('key', ('it:dev:regkey',", "a host creating or setting a registry key.', }), ('it:exec:reg:del',", "host on which the activity occurred.'}), ('time', ('time', {}), {", "('str', {}), { 'doc': 'A string representing a named pipe.',", "'doc': 'The primary name for the ATT&CK technique.', }), ('status',", "node.ndef[1] await node.snap.addNode('it:dev:str', pprop) async def _onPropSoftverSoft(self, node, oldv): #", "{ 'doc': 'The file access time.', }), ('user', ('inet:user', {}),", "a member of.', }), ('ipv4', ('inet:ipv4', {}), { 'doc': 'The", "function in an executable.', }), ('it:reveng:funcstr', ('comp', {'fields': (('function', 'it:reveng:function'),", "{ 'doc': 'An instance of a host reading a file", "'doc': 'The time that the AV engine detected the signature.'", "'doc': 'A library load event in a process.', }), ('it:exec:mmap',", "'An array of ATT&CK technique IDs addressed by the mitigation.',", "a filesystem.', }), ('it:exec:file:write', ('guid', {}), { 'doc': 'An instance", "node.set('software:name', name) async def _onPropSoftverArch(self, node, oldv): # make it:dev:str", "('it:mitre:attack:software', ('str', 
{'regex': r'^S[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Software", "{ 'doc': 'The specific file containing code that deleted the", "s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a integer larger than 1152921504606846975 as", "'The primary name for the ATT&CK software.', }), ('names', ('array',", "binding a listening port.', }), ('it:fs:file', ('guid', {}), { 'doc':", "technique IDs used by the group.', }), ('software', ('array', {'type':", "the hit.'}), ('dst:port', ('inet:port', {}), { 'doc': 'The destination port", "patch = s_version.unpackVersion(valu) valu = s_version.packVersion(major, minor, patch) subs =", "description of the software.', }), ('cpe', ('it:sec:cpe', {}), { 'doc':", "True, 'doc': 'The android app which requests the permission.'}), ('perm',", "('mesg', ('str', {}), { 'doc': 'The log messsage text.', }),", "NIST CPE 2.3 string specifying this software version', }), ('cves',", "ATT&CK element status.', 'ex': 'current', }), ('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}),", "{ 'doc': 'The host where the group is registered.', }),", "{ 'doc': 'The main process executing code that deleted data", "description of the ATT&CK tactic.', 'disp': {'hint': 'text'}, }), ('url',", "hash value.', }), ('hash:ntlm', ('hash:ntlm', {}), { 'doc': 'The NTLM", "this software.', }), ('desc', ('str', {}), { 'doc': 'A description", "software.', }), ('author:acct', ('inet:web:acct', {}), { 'deprecated': True, 'doc': 'Web", ": other * = \"any\" - = N/A ''' def", "'it:os:android:intent'))}), { 'doc': 'The given software listens for an android", "('ps:contact', {}), { 'doc': 'Contact info for the author of", "named pipe was created.', }), ('name', ('it:dev:pipe', {}), { 'doc':", "domain that the host is a member of.', }), ('ipv4',", "the ATT&CK technique.', }), ('status', ('it:mitre:attack:status', {}), { 'doc': 'The", "password hash.', }), ('it:exec:proc', ('guid', {}), { 'doc': 'A process", "{ 'doc': 'The IPv4 of the client during the URL", "'The 
file that was deleted.', }), )), ('it:exec:file:read', {}, (", "deleted.', }), ('path', ('file:path', {}), { 'doc': 'The file path", "that read the registry. Typically the same host referenced in", "{ 'doc': 'An array of groups that the account is", "external process which created the thread.', }), ('src:thread', ('it:exec:thread', {}),", "caused the hit.'}), ('dst', ('inet:addr', {}), { 'doc': 'The destination", "{}), { 'doc': 'The org that operates the given host.',", "specific antivirus signature.', }), ('it:av:prochit', ('guid', {}), { 'doc': 'An", "that wrote to the registry. May or may not be", "permissions.', }), ('created', ('time', {}), { 'doc': 'The time the", "process.', }), ('src:proc', ('it:exec:proc', {}), { 'doc': 'The process which", "mmap is mapped with read permissions.', }), ('perms:write', ('bool', {}),", "node.set(f'semver:{k}', v) except asyncio.CancelledError: # pragma: no cover raise except", "'Associated names for the ATT&CK software.', }), ('desc', ('str', {'strip':", "('it:dev:pipe', {}), { 'doc': 'The named pipe string.', }), )),", "'ro': True, 'doc': 'The \"edition\" field from the CPE 2.3", "the ATT&CK software.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True,", "mitigation.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", "of the memory map in bytes.', }), ('perms:read', ('bool', {}),", "For example, rundll32.exe may be considered the \"main\" executable for", "( ('proc', ('it:exec:proc', {}), { 'doc': 'The main process executing", "anti-virus product which contains the signature.', }), ('name', ('str', {'lower':", "except StopIteration: parts.append(part) return parts def _normPyStr(self, valu): if not", "'The path to the executable which started the process.', }),", "'ro': True, 'doc': 'The \"target_sw\" field from the CPE 2.3", "that caused the hit.'}), ('src:ipv6', ('inet:ipv6', {}), { 'doc': 'The", "}), ('deleted', ('time', {}), { 'doc': 'The time the memory", "parts = [] genr 
= iter(text) try: while True: c", "= '' parts = [] genr = iter(text) try: while", "hit.'}), ('sensor', ('it:host', {}), { 'doc': 'The sensor host node", "the process that deleted the file. Typically the same host", "the group.', }), ('desc', ('str', {}), { 'doc': 'A brief", "for the technique.', }), ('desc', ('str', {'strip': True}), { 'doc':", "('it:os:android:aaid', {}, ()), ('it:os:android:perm', {}, ()), ('it:os:android:intent', {}, ()), ('it:os:android:reqperm',", "the file.'}), ('flow', ('inet:flow', {}), { 'doc': 'The inet:flow that", "the ATT&CK mitigation.', }), ('tag', ('syn:tag', {}), { 'doc': 'The", "''' def postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr) self.setNormFunc(int, self._normPyInt) def _normPyStr(self,", "tag used to annotate nodes included in this ATT&CK mitigation.',", "for the file.', }), ('path:dir', ('file:path', {}), { 'ro': True,", "group.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True,", "}), ('created', ('time', {}), { 'doc': 'The time the memory", "from the CPE 2.3 string.'}), ('vendor', ('ou:name', {}), { 'ro':", "'The app software which listens for the android intent.'}), ('intent',", "ID.', 'ex': 'cno.mitre.g0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}),", "main process executing code that created the mutex.', }), ('host',", "('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 of the client", "specified to bind().' 
}), ('server:ipv6', ('inet:ipv6', {}), { 'doc': 'The", "{}), { 'doc': 'The credentials that were used for the", "{}), { 'doc': 'A brief description of the group.', }),", "'An instance of a host deleting a file from a", "{ 'doc': 'An instance of a host deleting a registry", "thread.', }), )), ('it:exec:loadlib', {}, ( ('proc', ('it:exec:proc', {}), {", "'A function inside an executable.', }), ('it:reveng:filefunc', ('comp', {'fields': (('file',", "('it:hosturl', ('comp', {'fields': (('host', 'it:host'), ('url', 'inet:url'))}), { 'doc': 'A", "string.'}), ('it:os:android:aaid', ('it:adid', {}), { 'doc': 'An android advertising identification", "{}), { 'doc': 'The org that operates the given domain.',", "}), ('it:exec:file:del', ('guid', {}), { 'doc': 'An instance of a", "{ 'ro': True, 'doc': 'The \"target_hw\" field from the CPE", "{}), { 'doc': 'Timestamp for when this version of the", "{}), { 'doc': 'A snort rule unique identifier.', }), ('it:app:snort:hit',", "s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch')) return valu, {'subs': subs} def _normPyInt(self, valu):", "('other', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "for the software.', }), ('isos', ('bool', {}), { 'doc': 'Set", "{ 'doc': 'Version string associated with this version instance.', }),", "('bool', {}), { 'doc': 'True if the mmap is mapped", "'doc': 'The time the file was deleted.', }), ('path', ('file:path',", "unversioned software product.', }), ('it:adid', ('str', {'lower': True, 'strip': True}),", "{}), { 'doc': 'The YARA rule that matched the file.'}),", "ATT&CK tactic.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "time the thread was created.', }), ('exited', ('time', {}), {", "the function.'}), ('string', ('str', {}), { 'ro': True, 'doc': 'The", "mutex. 
May or may not be the same :exe specified", "= next(genr) if c == '\\\\': c += next(genr) if", "'The URL that documents the ATT&CK technique.', }), ('tag', ('syn:tag',", "'The operator of the host.', }), ('org', ('ou:org', {}), {", "{ 'doc': 'Additional contact information associated with this account.', }),", "containing code that wrote to the file. May or may", ")), ('it:account', {}, ( ('user', ('inet:user', {}), { 'doc': 'The", "'The inet:flow that matched the snort rule.'}), ('src', ('inet:addr', {}),", "who authored the software.', }), ('url', ('inet:url', {}), { 'doc':", "{}), { 'doc': 'The complexity of the function.'}), ('funccalls', ('array',", "('it:mitre:attack:tactic', {}, ( ('name', ('str', {'strip': True}), { 'doc': 'The", "{}), { 'doc': 'The time the logon occured.', }), ('success',", "owner of the file.', }), ('group', ('inet:user', {}), { 'doc':", "GUID that represents an individual logon/logoff event.' }), ('it:hosturl', ('comp',", "('guid', {}), { 'doc': 'An instance of a password hash.',", "Version type.', }), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, { 'doc': 'A NIST", "the CPE 2.3 string.'}), ('update', ('str', {'lower': True, 'strip': True}),", "{ 'doc': 'The current version of the rule.'}), ('enabled', ('bool',", "the function.', }), ('strings', ('array', {'type': 'it:dev:str', 'uniq': True}), {", "{'fields': ( ('soft', 'it:prod:softver'), ('lib', 'it:prod:softver'))}), { 'doc': 'A software", "{}), { 'doc': \"The path to the POSIX account's default", "if subs: valu = s_version.packVersion(subs.get('major'), subs.get('minor', 0), subs.get('patch', 0)) return", ":exe referenced in :proc, if present.', }), ('time', ('time', {}),", "True, 'doc': 'Organization which authored the software.', }), ('author:acct', ('inet:web:acct',", "{}), { 'doc': \"The path to the POSIX account's home", "file that was loaded.', }), )), ('it:exec:mmap', {}, ( ('proc',", "{ 'doc': 'A GUID that represents an individual logon/logoff event.'", "process.', 
}), ('it:exec:mmap', ('guid', {}), { 'doc': 'A memory mapped", "('author:person', ('ps:person', {}), { 'deprecated': True, 'doc': 'Person who authored", "node.snap.addNode('it:dev:str', prop) # form the semver properly or bruteforce parts", "{ 'doc': 'A function from an imported library.', }), ),", "('array', {'type': 'it:dev:str', 'uniq': True, 'sorted': True}), { 'doc': 'Observed/variant", "= info.get('subs') return valu, subs except s_exc.BadTypeValu: # Try doing", "running the process that deleted data from the registry. Typically", "'A thread executing in a process.', }), ('it:exec:loadlib', ('guid', {}),", "key, if the value is a string.', }), ('int', ('it:dev:int',", "annotate nodes included in this ATT&CK technique.', 'ex': 'cno.mitre.t0100', }),", "creation time.', }), ('mtime', ('time', {}), { 'doc': 'The file", "{ 'doc': 'The value of the registry key, if the", "{ 'doc': 'The name of the software at a particular", "'doc': 'A named pipe created by a process at runtime.',", "'doc': 'The time the named pipe was created.', }), ('name',", "}), ('semver:patch', ('int', {}), { 'doc': 'Version patch number.', }),", "}), ('it:network', ('guid', {}), { 'doc': 'A GUID that represents", "('str', {}), { 'doc': 'The snort rule text.', 'disp': {'hint':", "contains the function.'}), ('va', ('int', {}), { 'doc': 'The virtual", "{ 'doc': 'The signature that the file triggered on.' }),", "software version.'}), ('it:prod:softlib', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('lib', 'it:prod:softver'))}),", "'strip': True}), { 'ro': True, 'doc': 'The \"sw_edition\" field from", "signature name within the namespace of an antivirus engine name.'", "{}), { 'doc': 'A free-form description of the CVE vulnerability.',", "file containing code that requested the URL. 
May or may", "group ID of the account.', 'ex': '1001', }), ('posix:gecos', ('int',", "{ 'doc': 'An android intent string.'}), ('it:os:android:reqperm', ('comp', {'fields': (", "('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, { 'doc': 'A NIST CPE 2.3 Formatted", "setting a registry key.', }), ('it:exec:reg:del', ('guid', {}), { 'doc':", "the app.'}), )), ('it:os:android:ibroadcast', {}, ( ('app', ('it:prod:softver', {}), {'ro':", "('url', ('inet:url', {}), { 'doc': 'URL where a specific version", "{}, ( ('time', ('time', {}), { 'doc': 'The time the", "{}), { 'doc': 'An instance of a host binding a", "password hash.', }), ('hash:md5', ('hash:md5', {}), { 'doc': 'The MD5", "{ 'doc': 'The YARA rule that matched the file.'}), ('proc',", "('posix:gecos', ('int', {}), { 'doc': 'The GECOS field for the", "('file:bytes', {}), { 'doc': 'The file that was modified.', }),", "this sub-technique.', }), ('tactics', ('array', {'type': 'it:mitre:attack:tactic', 'uniq': True, 'sorted':", "permissions.', }), ('perms:write', ('bool', {}), { 'doc': 'True if the", "created.', }), )), ('it:exec:file:del', {}, ( ('proc', ('it:exec:proc', {}), {", "process executing code that requested the URL.', }), ('host', ('it:host',", "{}), { 'doc': 'The main process executing code that created", "}), ('it:exec:reg:set', ('guid', {}), { 'doc': 'An instance of a", "YARA rule.'}), ('version', ('it:semver', {}), { 'doc': 'The most recent", "that deleted data from the registry. 
May or may not", "valu, {'subs': subs} def _normPyInt(self, valu): if valu < 0:", "('it:app:yara:rule', ('guid', {}), { 'doc': 'A YARA rule unique identifier.',", ")), ('it:auth:passwdhash', {}, ( ('salt', ('hex', {}), { 'doc': 'The", "exhibits interesting behavior.'}), ('complexity', ('int', {}), { 'doc': 'The complexity", "}), ('severity', ('int', {'enums': loglevels}), { 'doc': 'A log level", "}), ('parent', ('it:mitre:attack:technique', {}), { 'doc': 'The parent ATT&CK technique", "nodes[0].get('name') if name: await node.set('software:name', name) async def _onPropSoftverArch(self, node,", "the activity.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The host process", "where the logon originated.', }), ('client:ipv6', ('inet:ipv6', {}), { 'doc':", "{}, ( ('name', ('str', {'strip': True}), { 'doc': 'The primary", "'doc': 'Groups that are a member of this group.', }),", "'doc': 'The base memory address where the library was loaded", "of URLs that document the ATT&CK group.', }), ('techniques', ('array',", "file creation time.', }), ('mtime', ('time', {}), { 'doc': 'The", "the activity occurred.'}), ('time', ('time', {}), { 'doc': 'The time", "(40, 'warning'), (50, 'err'), (60, 'crit'), (70, 'alert'), (80, 'emerg'),", "SemVer(s_types.Int): ''' Provides support for parsing a semantic version string", "the logon.', }), ('duration', ('duration', {}), { 'doc': 'The duration", "in this ATT&CK group ID.', 'ex': 'cno.mitre.g0100', }), ('references', ('array',", "where the group is registered.', }), ('groups', ('array', {'type': 'it:group'}),", "system.' 
}), ('it:log:event', ('guid', {}), { 'doc': 'A GUID representing", "{}), { 'doc': 'An instance of a host adding a", "array of URLs that document the CVE ID.', }), )),", "or value that was written to.', }), )), ('it:exec:reg:del', {},", "'doc': 'The optional contiguous IPv6 address range of this network.',", "of the log event.', }), )), ('it:domain', {}, ( ('name',", "{}), { 'doc': 'A string representing a mutex.', }), ('it:dev:int',", "the group is registered.', }), ('domain', ('it:domain', {}), { 'doc':", "'ro': True, 'doc': 'The \"vendor\" field from the CPE 2.3", "'An array of strings referenced within the function.', }), )),", "ID.', 'ex': 'M1036', }), ('it:mitre:attack:software', ('str', {'regex': r'^S[0-9]{4}$'}), { 'doc':", "def _normPyStr(self, valu): valu = valu.strip() if not valu: raise", "specific file containing code that created the new file. May", "{ 'doc': 'The CWE description field.', 'disp': {'hint': 'text'}, }),", "the port.' }), ('server:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4", "= {'vars': {'soft': prop}} nodes = await node.snap.nodes('it:prod:soft=$soft', opts=opts) if", "instance of a YARA rule match to a process.', }),", "be parsed out and available as strings if that information", "{}), { 'doc': 'A URL linking this CWE to a", "by that program.', }), ('cmd', ('it:cmd', {}), { 'doc': 'The", "True}), { 'doc': 'A list of CVEs that apply to", "parts out of the string. 
Returns: int, dict: The system", "the file.', }), )), ('it:exec:file:add', {}, ( ('proc', ('it:exec:proc', {}),", "}), ('it:domain', ('guid', {}), { 'doc': 'A logical boundary of", "('atime', ('time', {}), { 'doc': 'The file access time.', }),", "True, 'sorted': True}), { 'doc': 'A list of CVEs that", "executing code that read the registry.', }), ('host', ('it:host', {}),", "library was unloaded.', }), ('path', ('file:path', {}), { 'doc': 'The", "'The file that was created.', }), )), ('it:exec:file:del', {}, (", "prop: return await node.set('vers:norm', prop) # Make it:dev:str from version", "complexity of the function.'}), ('funccalls', ('array', {'type': 'it:reveng:filefunc'}), { 'doc':", "version of the version string.', }), ('arch', ('it:dev:str', {}), {", "of the file.', }), ('group', ('inet:user', {}), { 'doc': 'The", "key or value that was deleted.', }), )), ('it:app:snort:rule', {},", "list of CVEs that apply to this software version.', }),", "('mtime', ('time', {}), { 'doc': 'The file modification time.', }),", "type.', }), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, { 'doc': 'A NIST CPE", "('ps:person', {}), { 'deprecated': True, 'doc': 'Person who authored the", "!= 13: mesg = f'CPE 2.3 string has {len(parts)} parts,", "'The main process executing code that bound the listening port.',", "{'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc':", "}), ('creds', ('auth:creds', {}), { 'doc': 'The credentials that were", "('str', {'lower': True}), { 'doc': 'Lower case normalized version of", "the CPE 2.3 string.'}), ('target_sw', ('str', {'lower': True, 'strip': True}),", "registered.', }), ('posix:uid', ('int', {}), { 'doc': 'The user ID", "'deprecated': True, 'doc': 'Email address of the sofware author.', }),", "the software.', }), ('cpe', ('it:sec:cpe', {}), { 'doc': 'The NIST", "the account is a member of.', }), )), ('it:group', {},", "NIST CPE 2.3 string specifying this software.', }), ('author', 
('ps:contact',", "'Version major number.', }), ('semver:minor', ('int', {}), { 'doc': 'Version", "('it:host', {}), { 'doc': 'The host where the group is", "client during the URL retrieval..' }), ('client:port', ('inet:port', {}), {", "'doc': 'System normalized semantic version number.', }), ('semver:major', ('int', {}),", "'The host running the process that read the registry. Typically", "('host', ('it:host', {}), { 'doc': 'The host on which the", "match.'}), )), ('it:app:yara:procmatch', {}, ( ('rule', ('it:app:yara:rule', {}), { 'doc':", "host thread which caused the activity.'}), ('host', ('it:host', {}), {", "killed this process.', }), )), ('it:exec:thread', {}, ( ('proc', ('it:exec:proc',", "{}), { 'doc': 'The main process executing code that bound", "this ATT&CK technique.', }), ('isnow', ('it:mitre:attack:technique', {}), { 'doc': 'If", "{ 'doc': 'A developer selected integer constant.', }), ('it:dev:regkey', ('str',", "{'type': 'it:sec:cve', 'uniq': True, 'sorted': True}), { 'doc': 'A list", "session ended.', }), ('host', ('it:host', {}), { 'doc': 'The host", "that the YARA engine matched the process to the rule.'}),", "key or value that was written to.', }), )), ('it:exec:reg:del',", "'A GUID that represents a group on a host or", "1}), { 'doc': 'A function from an imported library.', }),", "used to annotate nodes included in this ATT&CK mitigation.', 'ex':", "{}, ( ('name', ('str', {}), { 'doc': 'The name of", "field from the CPE 2.3 string.'}), ('product', ('str', {'lower': True,", "'doc': 'The mutex string.', }), )), ('it:exec:pipe', {}, ( ('proc',", "{ 'doc': 'The path for the file.', }), ('path:dir', ('file:path',", "True, 'doc': 'The signature name.' 
}), ('desc', ('str', {}), {", "or value that was deleted.', }), )), ('it:app:snort:rule', {}, (", "group is registered.', }), ('groups', ('array', {'type': 'it:group'}), { 'doc':", "('int', {'enums': loglevels}), { 'doc': 'A log level integer that", "('os', ('it:prod:softver', {}), { 'doc': 'The operating system of the", "{}), { 'doc': 'The name of the host or system.',", "named pipe string.', }), )), ('it:exec:url', {}, ( ('proc', ('it:exec:proc',", "were used for the logon.', }), ('duration', ('duration', {}), {", "line parameters.', 'disp': {'hint': 'text'}, }), ('pid', ('int', {}), {", "field from the CPE 2.3 string.'}), ('target_sw', ('str', {'lower': True,", "of alternate names for the ATT&CK group.', }), ('desc', ('str',", "URL that documents the ATT&CK group.', }), ('tag', ('syn:tag', {}),", "{ 'doc': 'The main process executing code that deleted the", "'other': parts[12], } return ':'.join(parts), {'subs': subs} class SemVer(s_types.Int): '''", "product which is present on a given host.', }), ('it:av:sig',", "that include this technique.', }), )), ('it:mitre:attack:software', {}, ( ('software',", "Windows registry key/value pair.', }), ('it:prod:soft', ('guid', {}), { 'doc':", "left after stripping whitespace') subs = s_version.parseSemver(valu) if subs is", "of the registry key, if the value is a string.',", "operates the given domain.', }), )), ('it:network', {}, ( ('name',", "'The specific file containing code that bound the listening port.", "where the file was deleted.', }), ('path:dir', ('file:path', {}), {", "that created the named pipe. 
Typically the same host referenced", "('path:base', ('file:base', {}), { 'ro': True, 'doc': 'The final component", "process executing code that created the new file.', }), ('host',", "('it:app:yara:match', ('comp', {'fields': (('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}), { 'doc': 'A", "('islib', ('bool', {}), { 'doc': 'Set to True if the", ")), ('it:exec:mmap', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "ATT&CK technique.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "key.', }), ('it:app:yara:rule', ('guid', {}), { 'doc': 'A YARA rule", "{}), { 'doc': 'The signature that the file triggered on.'", "value is binary data.', }), )), ('it:prod:soft', {}, ( ('name',", "('it:exec:loadlib', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The process", "{ 'doc': 'A free-form description of the signature.', 'disp': {'hint':", "SHA256 hash of the memory map. Bytes may optionally be", "software product which is present on a given host.', }),", "hash.', }), )), ('it:cmd', {}, ()), ('it:exec:proc', {}, ( ('host',", "to the registry. Typically the same host referenced in :proc,", "'debug'), (20, 'info'), (30, 'notice'), (40, 'warning'), (50, 'err'), (60,", "the function.', }), )), ('it:reveng:filefunc', {}, ( ('function', ('it:reveng:function', {}),", "pipe created by a process at runtime.', }), ('it:exec:url', ('guid',", "True, 'doc': 'The guid matching the function.'}), ('file', ('file:bytes', {}),", "produced the hit.'}), ('version', ('it:semver', {}), { 'doc': 'The version", "get version information for. 
Notes: This first attempts to parse", "the CPE 2.3 string.'}), ('target_hw', ('str', {'lower': True, 'strip': True}),", "('bool', {}), { 'doc': 'Set to false to indicate an", "'doc': 'The app software which listens for the android intent.'}),", "except asyncio.CancelledError: # pragma: no cover raise except Exception: logger.exception('Failed", "'doc': 'Used to map an ATT&CK group to a synapse", "{}, ()), ('it:os:android:aaid', {}, ()), ('it:os:android:perm', {}, ()), ('it:os:android:intent', {},", "True, 'doc': 'The \"target_sw\" field from the CPE 2.3 string.'}),", "YARA engine matched the process to the rule.'}), ('version', ('it:semver',", "field.', 'ex': 'Buffer Copy without Checking Size of Input (Classic", "where the memory was mapped.', }), ('va', ('int', {}), {", "True, 'doc': 'The file that matched the YARA rule.'}), ('version',", "{ 'doc': 'The host where the logon originated.', }), ('client:ipv4',", "if the mmap is mapped with write permissions.', }), ('perms:execute',", "/ modified the existing file.', }), ('host', ('it:host', {}), {", "client during the URL retrieval..' }), ('client:ipv6', ('inet:ipv6', {}), {", "code that created the new file. May or may not", "{ 'doc': 'Set to false to indicate an unsuccessful logon", "{}), { 'doc': 'Additional contact information associated with this account.',", "array of URLs that document the ATT&CK group.', }), ('techniques',", "created.', }), ('deleted', ('time', {}), { 'doc': 'The time the", "value of the registry key, if the value is binary", "{}), { 'doc': 'The process which created the process.' 
}),", "rule that matched the file.'}), ('flow', ('inet:flow', {}), { 'doc':", "pipe was created.', }), ('name', ('it:dev:pipe', {}), { 'doc': 'The", "{}), { 'doc': 'An instance of a YARA rule match", "filesystem.', }), ('it:exec:file:write', ('guid', {}), { 'doc': 'An instance of", "('it:exec:bind', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "'A NIST CPE 2.3 Formatted String', }), ), 'types': (", "('passwd', ('inet:passwd', {}), { 'doc': 'The (optional) clear text password", "to get version information for. Notes: This first attempts to", "the process.', }), ('src:exe', ('file:path', {}), { 'doc': 'The path", "{}), { 'doc': 'The owner of the file.', }), ('group',", "integer constant.', }), ('it:dev:regkey', ('str', {}), { 'doc': 'A Windows", "'The host containing the file.', }), ('path', ('file:path', {}), {", "{}), { 'doc': 'The time the memory map was deleted.',", "'The software which distributes the file.'}), ('file', ('file:bytes', {}), {'ro':", "path (parsed from :path).', }), ('file', ('file:bytes', {}), { 'doc':", ")), ('it:mitre:attack:mitigation', {}, ( # TODO map to an eventual", "a host. 
May be an actual (e.g., endpoint) or virtual", "= node.ndef[1] await node.snap.addNode('it:dev:str', pprop) async def _onPropSoftverSoft(self, node, oldv):", "present.'}), ('time', ('time', {}), { 'doc': 'The time the file", "if valu < 0: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a", "('it:mitre:attack:technique', {}, ( ('name', ('str', {'strip': True}), { 'doc': 'The", "of a host binding a listening port.', }), ('it:fs:file', ('guid',", "self._normPyStr) self.setNormFunc(int, self._normPyInt) def _normPyStr(self, valu): valu = valu.strip() if", "{ 'doc': 'The file that triggered the signature hit.', }),", "was created.', }), )), ('it:exec:file:del', {}, ( ('proc', ('it:exec:proc', {}),", "''' try: valu, info = self.core.model.type('it:semver').norm(valu) subs = info.get('subs') return", "()), ('it:os:android:aaid', {}, ()), ('it:os:android:perm', {}, ()), ('it:os:android:intent', {}, ()),", "alternate names for the ATT&CK group.', }), ('desc', ('str', {}),", "that triggered the signature hit.', }), ('sig', ('it:av:sig', {}), {", "the account logged in to.', }), ('account', ('it:account', {}), {", "('released', ('time', {}), { 'doc': 'Timestamp for when this version", "'uniq': True, 'sorted': True, 'split': ','}), { 'doc': 'An array", "('guid', {}), { 'doc': 'A GUID that represents a logical", "'doc': 'A process executing on a host. 
May be an", "{ 'ro': True, 'doc': 'The \"sw_edition\" field from the CPE", "'The \"sw_edition\" field from the CPE 2.3 string.'}), ('target_sw', ('str',", "string representing a mutex.', }), ('it:dev:int', ('int', {}), { 'doc':", "--dostuff bar', }), ('it:exec:mutex', ('guid', {}), { 'doc': 'A mutex", "( ('software', ('it:prod:soft', {}), { 'doc': 'Used to map an", "), }), ), 'forms': ( ('it:hostname', {}, ()), ('it:host', {},", "'doc': 'The process which contains the thread.', }), ('created', ('time',", "('inet:url', {}), { 'doc': 'A URL linking this CVE to", "('path:dir', ('file:path', {}), { 'ro': True, 'doc': 'The parent directory", "from the CPE 2.3 string.'}), ('target_sw', ('str', {'lower': True, 'strip':", ")), ('it:exec:thread', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "('sig', ('it:av:sig', {}), { 'ro': True, 'doc': 'The signature that", "'The name of a host or system.', }), ('it:host', ('guid',", "for integer comparison purposes, as we cannot map an arbitrary", "{ 'doc': 'Other function calls within the scope of the", "'deprecated': True, 'doc': 'Web account of the software author.', }),", "('it:os:android:reqperm', {}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "{'strip': True, 'lower': True}), { 'doc': 'The name of a", "time the URL was requested.', }), ('url', ('inet:url', {}), {", "file containing code that deleted data from the registry. May", "{}), { 'ro': True, 'doc': 'The file that triggered the", "by the group.', }), ('software', ('array', {'type': 'it:mitre:attack:software', 'uniq': True,", "normalizes a version string into an integer to allow version", "('name', ('str', {}), { 'doc': 'The name of the function.'}),", "of the file.'}), )), ('it:hostsoft', {}, ( ('host', ('it:host', {}),", "during the URL retrieval.' 
}), ('client:ipv4', ('inet:ipv4', {}), { 'doc':", "( ('rule', ('it:app:yara:rule', {}), { 'doc': 'The YARA rule that", "this technique.', }), )), ('it:mitre:attack:software', {}, ( ('software', ('it:prod:soft', {}),", "signature hit.', }), ('sig', ('it:av:sig', {}), { 'doc': 'The signature", "'doc': 'The current version of the rule.'}), ('enabled', ('bool', {}),", "negative integer as a semver.') if valu > s_version.mask60: raise", "support for parsing a semantic version string into its component", "field from the CPE 2.3 string.'}), ('language', ('str', {'lower': True,", "{}), { 'doc': 'The file representing the value of the", "subs = { 'part': parts[2], 'vendor': parts[3], 'product': parts[4], 'version':", "of the CVE vulnerability.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "('file:bytes', {}), { 'doc': 'The file on the host.', }),", "('file:bytes', {}), { 'doc': 'The specific file containing code that", "return valu, {'subs': subs} def repr(self, valu): major, minor, patch", "{ 'doc': 'The specific file containing code that requested the", "host or network.' 
}), ('it:logon', ('guid', {}), { 'doc': 'A", "the ATT&CK group.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True,", "a host deleting a registry key.', }), ('it:app:yara:rule', ('guid', {}),", "'The \"other\" field from the CPE 2.3 string.'}), )), ('it:sec:cwe',", "class Cpe23Str(s_types.Str): ''' CPE 2.3 Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2)", "string.'}), ('version', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "'1001', }), ('posix:gid', ('int', {}), { 'doc': 'The primary group", "('file', ('file:bytes', {}), { 'doc': 'The file that was modified.',", ")), ('it:prod:softos', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc':", "{ 'doc': 'A Windows registry key/value pair.', }), ('it:prod:soft', ('guid',", "is disregarded for integer comparison purposes, as we cannot map", "the library.'}), ('lib', ('it:prod:softver', {}), {'ro': True, 'doc': 'The library", "string.'}), ('it:os:windows:sid', ('str', {'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc': 'A Microsoft Windows", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"target_sw\"", "'Software architecture.', }), ('released', ('time', {}), { 'doc': 'Timestamp for", "apply to this software version.', }), ('vers', ('it:dev:str', {}), {", "is listened for by the app.'}), )), ('it:os:android:ibroadcast', {}, (", "{ 'doc': 'A software version contains a library software version.'}),", "'doc': 'The signature name.' 
}), ('desc', ('str', {}), { 'doc':", "valu = s_version.packVersion(major, minor, patch) subs = {'major': major, 'minor':", "}), ('atime', ('time', {}), { 'doc': 'The file access time.',", "that was modified.', }), )), ('it:exec:reg:get', {}, ( ('proc', ('it:exec:proc',", "'The user ID of the account.', 'ex': '1001', }), ('posix:gid',", "to imported library functions within the scope of the function.',", "https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2) cpe:2.3: part : vendor : product :", "('hash:lm', {}), { 'doc': 'The LM password hash value.', }),", "description of the domain.', }), ('org', ('ou:org', {}), { 'doc':", "{ 'doc': 'Software architecture.', }), ('released', ('time', {}), { 'doc':", "file that was deleted.', }), )), ('it:exec:file:read', {}, ( ('proc',", "that represents a logical network.' }), ('it:domain', ('guid', {}), {", "('user', ('inet:user', {}), { 'doc': 'The owner of the file.',", "brief description of the group.', }), ('host', ('it:host', {}), {", "'strip': True}), { 'doc': 'An advertising identification string.'}), ('it:os:windows:sid', ('str',", "}), ('duration', ('duration', {}), { 'doc': 'The duration of the", "{}), {'ro': True, 'doc': 'The android intent which is listened", "('enabled', ('bool', {}), { 'doc': 'The rule enabled status to", "file was read.', }), ('path', ('file:path', {}), { 'doc': 'The", "'Set to false to indicate an unsuccessful logon attempt.', }),", "IPv4 address of the flow that caused the hit.'}), ('src:ipv6',", "{}), { 'doc': 'The synapse tag used to annotate nodes", "'doc': 'The group owner of the file.', }), )), ('it:exec:file:add',", "this ATT&CK tactic.', 'ex': 'cno.mitre.ta0100', }), ('references', ('array', {'type': 'inet:url',", "port of the flow that caused the hit.'}), ('time', ('time',", "'A URL linking this CWE to a full description.', }),", "known ipv4 address for the host.' 
}), ('latlong', ('geo:latlong', {}),", "inet:flow that matched the snort rule.'}), ('src', ('inet:addr', {}), {", "by the software.'}), ('path', ('file:path', {}), { 'doc': 'The default", "normed valu prop = node.get('vers') if not prop: return await", "{}), { 'doc': 'The rule enabled status to be used", "file was deleted.', }), ('path:dir', ('file:path', {}), { 'ro': True,", "'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc': 'A vulnerability as designated by a", "String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2) cpe:2.3: part : vendor : product", "caused the hit.'}), ('sensor', ('it:host', {}), { 'doc': 'The sensor", "= N/A ''' def __init__(self, modl, name, info, opts): opts['lower']", "('it:prod:softver', {}), {'ro': True, 'doc': 'The library software version.'}), )),", "that the file triggered on.' }), ('sig:name', ('str', {'lower': True}),", "be compatible with the given os software version.'}), ('it:hostsoft', ('comp',", "('file', ('file:bytes', {}), { 'doc': 'The file that was created.',", "{ 'doc': 'A list of CVEs that apply to this", "'doc': 'An instance of a YARA rule match to a", "('software:name', ('str', {'lower': True, 'strip': True}), { 'doc': 'The name", "the namespace of an antivirus engine name.' }), ('it:av:filehit', ('comp',", "'The time the logon occured.', }), ('success', ('bool', {}), {", "operates the given host.', }), )), ('it:log:event', {}, ( ('mesg',", "('client', ('inet:client', {}), { 'doc': 'The address of the client", "version string into its component parts. This normalizes a version", "'lower': True}), { 'doc': 'The name of a host or", "IPv4 address range of this network.', }), ('net6', ('inet:net6', {}),", "value.', }), ('passwd', ('inet:passwd', {}), { 'doc': 'The (optional) clear", "bound the listening port. Typically the same host referenced in", "{}), { 'doc': 'The optional contiguous IPv4 address range of", "user ID of the account.', 'ex': '1001', }), ('posix:gid', ('int',", "port.' 
}), )), ('it:fs:file', {}, ( ('host', ('it:host', {}), {", "file.', }), ('it:app:yara:procmatch', ('guid', {}), { 'doc': 'An instance of", "{}), { 'doc': 'An android advertising identification string.'}), ('it:os:android:perm', ('str',", "executing code that created the new file.', }), ('host', ('it:host',", "'The authentication domain where the group is registered.', }), ('groups',", "'The parent directory of the file path (parsed from :path).',", "'doc': 'An instance of a host reading a file from", "was requested.', }), ('client', ('inet:client', {}), { 'doc': 'The address", "'An instance of a host deleting a registry key.', }),", "it:semver normalization before attempting to extract version parts out of", "the given os software version.'}), ('it:hostsoft', ('comp', {'fields': (('host', 'it:host'),", "function.', }), ('strings', ('array', {'type': 'it:dev:str', 'uniq': True}), { 'doc':", "'patch': patch} return valu, {'subs': subs} def repr(self, valu): major,", "{'lower': True}), { 'doc': 'Normalized version of the version string.',", "}), ('int', ('it:dev:int', {}), { 'doc': 'The value of the", "URL.', }), ('it:exec:bind', ('guid', {}), { 'doc': 'An instance of", "account is registered.', }), ('domain', ('it:domain', {}), { 'doc': 'The", "group on a host or network.' }), ('it:logon', ('guid', {}),", "domain.' 
}), ('it:account', ('guid', {}), { 'doc': 'A GUID that", "CVE vulnerability.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "number.', 'ex': 'cve-2012-0158' }), ('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), { 'doc':", "\"version\" field from the CPE 2.3 string.'}), ('update', ('str', {'lower':", "valu: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No text left after stripping whitespace')", "'Notes concerning the function.'}), ('impcalls', ('array', {'type': 'it:reveng:impfunc'}), { 'doc':", "a file to a filesystem.', }), ('it:exec:file:del', ('guid', {}), {", "{}), { 'deprecated': True, 'doc': 'Email address of the sofware", "}), ('it:mitre:attack:software', ('str', {'regex': r'^S[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK", "of a host getting a registry key.', }), ('it:exec:reg:set', ('guid',", "information of the org or person who authored the software.',", "('name', ('it:dev:pipe', {}), { 'doc': 'The named pipe string.', }),", "'sorted': True}), { 'doc': 'Observed/variant names for this software.', }),", "actual or a virtual / notional host.', }), ('exe', ('file:bytes',", "in this ATT&CK mitigation.', 'ex': 'cno.mitre.m0100', }), ('references', ('array', {'type':", "('inet:url', {}), { 'doc': 'The URL that was requested.', }),", "mitigation.', }), ('addresses', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True,", "'doc': 'The main process executing code that read the file.',", "where the host resides.', }), ('loc', ('loc', {}), { 'doc':", "}), ('it:logon', ('guid', {}), { 'doc': 'A GUID that represents", "of the host.', }), ('domain', ('it:domain', {}), { 'doc': 'The", "semver is the bitwise concatenation of the major, minor and", "CWE Relationships.' }), )), ('it:mitre:attack:group', {}, ( ('org', ('ou:org', {}),", "'doc': 'The source IPv6 address of the flow that caused", "= f'CPE 2.3 string has {len(parts)} parts, expected 13.' 
raise", "architecture.', }), ('released', ('time', {}), { 'doc': 'Timestamp for when", "parsing a semantic version string into its component parts. This", "file triggered on.' }), ('sig:name', ('str', {'lower': True}), { 'ro':", "('time', ('time', {}), { 'doc': 'The time that the activity", "CVE to a full description.', }), ('references', ('array', {'type': 'inet:url',", "{}), { 'ro': True, 'doc': 'The guid matching the function.'}),", "represented as integers, with a max width of 20 bits.", "( ('soft', 'it:prod:softver'), ('lib', 'it:prod:softver'))}), { 'doc': 'A software version", "pprop = node.ndef[1] await node.snap.addNode('it:dev:str', pprop) async def _onPropSoftverSoft(self, node,", "{ 'doc': 'The duration of the logon session.', }), ('client:host',", "}), ('net6', ('inet:net6', {}), { 'doc': 'The optional contiguous IPv6", "where the map was created in the process.', }), ('size',", "{}), { 'doc': 'The current version of the rule.'}), ('enabled',", "of a process triggering an alert on a specific antivirus", "not prop: return await node.set('vers:norm', prop) # Make it:dev:str from", "string') if subs: valu = s_version.packVersion(subs.get('major'), subs.get('minor', 0), subs.get('patch', 0))", "'doc': 'The process where the library was loaded.', }), ('va',", "('it:hostname', {}), { 'doc': 'The name of the host or", "('array', {'type': 'inet:url', 'uniq': True}), { 'doc': 'An array of", "2.3 Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2) cpe:2.3: part : vendor", "# TODO map to an eventual risk:mitigation ('name', ('str', {'strip':", "address of the trigger.'}), ('dst:ipv4', ('inet:ipv4', {}), { 'doc': 'The", "instance of a snort rule hit.', }), ('it:reveng:function', ('guid', {}),", "'doc': 'True if the mmap is mapped with read permissions.',", "'doc': 'The main process executing code that deleted the file.',", "'ro': True, 'doc': 'The file that triggered the signature hit.',", "'G0100', }), 
('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}), { 'doc': 'A Mitre", "android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro': True, 'doc': 'The android", "{ 'doc': 'The process which contains the thread.', }), ('created',", "IPv6 address specified to bind().' }), ('server:port', ('inet:port', {}), {", "}), ('time', ('time', {}), { 'doc': 'The time the URL", "document the ATT&CK mitigation.', }), ('addresses', ('array', {'type': 'it:mitre:attack:technique', 'uniq':", "'doc': 'A description of the ATT&CK mitigation.', 'disp': {'hint': 'text'},", "True, 'split': ','}), { 'doc': 'An array of ATT&CK tactics", "'it:prod:softver'), ('lib', 'it:prod:softver'))}), { 'doc': 'A software version contains a", "ATT&CK tactic.', }), ('desc', ('str', {}), { 'doc': 'A description", "'it:av:sig'))}), { 'doc': 'A file that triggered an alert on", "part = '' parts = [] genr = iter(text) try:", "not be the same :exe specified in :proc, if present.',", "True, 'sorted': True}), { 'doc': 'Observed/variant names for this software.',", "except s_exc.BadTypeValu: # Try doing version part extraction by noming", "first attempts to parse strings using the it:semver normalization before", "account.', 'ex': '1001', }), ('posix:gecos', ('int', {}), { 'doc': 'The", "{}), { 'doc': 'The host running the process that wrote", "'doc': 'The thread which created this thread.', }), )), ('it:exec:loadlib',", "than 1152921504606846975 as a semver.') major, minor, patch = s_version.unpackVersion(valu)", "'The file that matched the YARA rule.'}), ('version', ('it:semver', {}),", "clear text password for this password hash.', }), )), ('it:cmd',", "}), ('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc': 'A Mitre ATT&CK", "'The time the memory map was created.', }), ('deleted', ('time',", "an arbitrary pre-release version into a integer value Major, minor", "}), )), ('it:cmd', {}, ()), ('it:exec:proc', {}, ( ('host', ('it:host',", "{ 'doc': 'The path 
to the executable which started the", "{ 'doc': 'An array of ATT&CK software IDs used by", "{ 'ro': True, 'doc': 'URL available on the host.', }),", "{ 'ro': True, 'doc': 'The \"target_sw\" field from the CPE", "{ 'doc': 'The optional contiguous IPv4 address range of this", "{ 'doc': 'Normalized version of the version string.', }), ('arch',", "('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True, 'split': ','}),", "be considered the \"main\" executable for DLLs loaded by that", "command-line string.', 'ex': 'foo.exe --dostuff bar', }), ('it:exec:mutex', ('guid', {}),", "function.'}), ('rank', ('int', {}), { 'doc': 'The function rank score", "2.3 string.'}), ('update', ('str', {'lower': True, 'strip': True}), { 'ro':", "subs.get('patch', 0)) return valu, subs async def _onFormItDevStr(self, node): await", "the password hash.', }), ('hash:md5', ('hash:md5', {}), { 'doc': 'The", "specific version of a software product.'}), ('it:prod:softfile', ('comp', {'fields': (", "( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The software version", "version of the rule.'}), ('enabled', ('bool', {}), { 'doc': 'The", "was read.', }), )), ('it:exec:reg:set', {}, ( ('proc', ('it:exec:proc', {}),", "{}), { 'doc': 'The host running the process that read", "'doc': 'A version of a software product which is present", "{}), { 'doc': 'The time the process exited.', }), ('exitcode',", "ID of the account.', 'ex': '1001', }), ('posix:gid', ('int', {}),", "{ 'doc': 'The size of the memory map in bytes.',", "parts = self._splitCpe23(text) if len(parts) != 13: mesg = f'CPE", "('parents', ('array', {'type': 'it:sec:cwe', 'uniq': True, 'sorted': True, 'split': ','}),", "process that matched the YARA rule.'}), ('time', ('time', {}), {", "{ 'deprecated': True, 'doc': 'A reference to a string inside", "minor and patch levels are represented as integers, with a", "('guid', {}), { 'doc': 'An instance of a host writing", "('perms:read', ('bool', {}), { 
'doc': 'True if the mmap is", "(('file', 'file:bytes'), ('sig', 'it:av:sig'))}), { 'doc': 'A file that triggered", "mitigation.', 'ex': 'cno.mitre.m0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}),", "('logoff:time', ('time', {}), { 'doc': 'The time the logon session", "to.', }), ('account', ('it:account', {}), { 'doc': 'The account that", "description of the network.', }), ('org', ('ou:org', {}), { 'doc':", "}), ('url', ('inet:url', {}), { 'doc': 'A reference URL for", "('src:port', ('inet:port', {}), { 'doc': 'The source port of the", "to annotate nodes included in this ATT&CK technique.', 'ex': 'cno.mitre.t0100',", "('time', {}), { 'doc': 'The time that the AV engine", "signature.' }), )), ('it:auth:passwdhash', {}, ( ('salt', ('hex', {}), {", "}), ('author:email', ('inet:email', {}), { 'deprecated': True, 'doc': 'Email address", "code that created the new file.', }), ('host', ('it:host', {}),", "def repr(self, valu): major, minor, patch = s_version.unpackVersion(valu) valu =", "{ 'doc': 'A url hosted on or served by a", "specifying this software version', }), ('cves', ('array', {'type': 'it:sec:cve', 'uniq':", "('file:bytes', {}), { 'ro': True, 'doc': 'The file that triggered", "{ 'doc': 'An instance of a snort rule hit.', }),", "'doc': 'The software which can run on the operating system.'}),", "tactic.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse tag", "{}), { 'doc': 'The user name of the process owner.',", "specified in :proc, if present.'}), ('time', ('time', {}), { 'doc':", "}), )), ('it:auth:passwdhash', {}, ( ('salt', ('hex', {}), { 'doc':", "''' CPE 2.3 Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2) cpe:2.3: part", "address of the client during the URL retrieval.' 
}), ('client:ipv4',", "}), ('status', ('it:mitre:attack:status', {}), { 'doc': 'The status of this", "by the app.'}), )), ('it:prod:softver', {}, ( ('software', ('it:prod:soft', {}),", "'doc': 'The time the memory map was deleted.', }), ('path',", "path to the executable which started the process.', }), ('src:proc',", "Bytes may optionally be present in the axon.', }), )),", "node.', }), ('os', ('it:prod:softver', {}), { 'doc': 'The operating system", "the signature hit.', }), ('sig', ('it:av:sig', {}), { 'doc': 'The", "('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}), { 'doc': 'A Mitre ATT&CK element", "{}), { 'doc': 'The host thread which caused the activity.'}),", "range of this network.', }), ('net6', ('inet:net6', {}), { 'doc':", ")), ('it:prod:softfile', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc':", "of the process owner.', }), ('path', ('file:path', {}), { 'doc':", "software.', }), ('url', ('inet:url', {}), { 'doc': 'URL relevant for", "caused the hit.'}), ('src:ipv6', ('inet:ipv6', {}), { 'doc': 'The source", "'doc': 'The host running the process that created the new", "('it:dev:regkey', {}), { 'doc': 'The Windows registry key.', }), ('str',", "on the operating system.'}), ('os', ('it:prod:softver', {}), {'ro': True, 'doc':", "'The specific file containing code that deleted the file. 
May", "'The file access time.', }), ('user', ('inet:user', {}), { 'doc':", "memory mapped segment located in a process.', }), ('it:cmd', ('str',", "'doc': 'The main process executing code that wrote to the", "by noming through the string subs = s_version.parseVersionParts(valu) if subs", "'doc': 'The operating system which the software can run on.'}),", "'doc': 'URL where a specific version of the software is", "registry key.', }), ('it:exec:reg:del', ('guid', {}), { 'doc': 'An instance", "which caused the activity.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The", "the process owner.', }), ('path', ('file:path', {}), { 'doc': 'The", "in the process.', }), ('loaded', ('time', {}), { 'doc': 'The", "where the logon originated.', }), )), ('it:hosturl', {}, ( ('host',", "def _normPyInt(self, valu): if valu < 0: raise s_exc.BadTypeValu(valu=valu, name=self.name,", "Prerelease information is disregarded for integer comparison purposes, as we", "an alert on a specific antivirus signature.' }), ('it:auth:passwdhash', ('guid',", "'The operating system of the host.' }), ('manu', ('str', {}),", "value that was written to.', }), )), ('it:exec:reg:del', {}, (", "app.'}), )), ('it:prod:softver', {}, ( ('software', ('it:prod:soft', {}), { 'doc':", "record of the log event.', }), )), ('it:domain', {}, (", "('vendor', ('ou:name', {}), { 'ro': True, 'doc': 'The \"vendor\" field", "the library was loaded in the process.', }), ('loaded', ('time',", "('inet:port', {}), { 'doc': 'The destination port of the flow", "'A SHA256 hash of the memory map. Bytes may optionally", "{'lower': True}), { 'doc': 'A short description of the software.',", "}), ('perms:execute', ('bool', {}), { 'doc': 'True if the mmap", "URL retrieval..' 
}), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6", "{ 'doc': 'A description of the ATT&CK software.', 'disp': {'hint':", "not valu.startswith('cpe:2.3:'): mesg = 'CPE 2.3 string is expected to", "('names', ('array', {'type': 'str', 'uniq': True, 'sorted': True}), { 'doc':", "cannot map an arbitrary pre-release version into a integer value", "True}), { 'doc': 'The primary name for the ATT&CK technique.',", "('str', {'regex': r'^G[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Group ID.',", "{ 'doc': 'The manufacturer of the host.', }), ('model', ('str',", "{ 'doc': 'The MD5 password hash value.', }), ('hash:sha1', ('hash:sha1',", "'doc': 'The time the port was bound.', }), ('server', ('inet:server',", "host.', }), )), ('it:log:event', {}, ( ('mesg', ('str', {}), {", "rule that matched the file.'}), ('file', ('file:bytes', {}), { 'ro':", "mmap is a mapped view of a file.', }), ('hash:sha256',", "rule that matched the file.'}), ('proc', ('it:exec:proc', {}), { 'doc':", "the host.', }), ('org', ('ou:org', {}), { 'doc': 'The org", "{ 'doc': 'Version patch number.', }), ('semver:pre', ('str', {}), {", "to the POSIX account's home directory.\", 'ex': '/home/visi', }), ('posix:shell',", "{}), { 'doc': 'The address of the client during the", "Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2) cpe:2.3: part : vendor :", "component of the file path (parsed from :path).', }), ('file',", "('proc', ('it:exec:proc', {}), { 'doc': 'The main process executing code", "'doc': 'The time the logon session ended.', }), ('host', ('it:host',", "the CPE 2.3 string.'}), ('version', ('str', {'lower': True, 'strip': True}),", "{'ro': True, 'doc': 'The android intent which is listened for", "( ('software', ('it:prod:soft', {}), { 'doc': 'Software associated with this", "linking this CVE to a full description.', }), ('references', ('array',", "group owner of the file.', }), )), ('it:exec:file:add', {}, (", "{ 'doc': 'The serial number of the host.', }), 
('operator',", "software.'}), ('softver', ('it:prod:softver', {}), {'ro': True, 'doc': 'Software on the", "the file.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The process that", "'ro': True, 'doc': 'The \"target_hw\" field from the CPE 2.3", "text.', 'disp': {'hint': 'text'}, }), ('name', ('str', {}), { 'doc':", "v in subs.items(): await node.set(f'semver:{k}', v) except asyncio.CancelledError: # pragma:", "'it:dev:str', 'uniq': True, 'sorted': True}), { 'doc': 'Observed/variant names for", "'it:host'), ('url', 'inet:url'))}), { 'doc': 'A url hosted on or", "ID of the account.', 'ex': '1001', }), ('windows:sid', ('it:os:windows:sid', {}),", "the registry.', }), ('host', ('it:host', {}), { 'doc': 'The host", "('groups', ('array', {'type': 'it:group'}), { 'doc': 'Groups that are a", "for DLLs loaded by that program.', }), ('cmd', ('it:cmd', {}),", "matched the file.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The process", "enabled status to be used for YARA evaluation engines.'}), )),", "name.' }), ('desc', ('str', {}), { 'doc': 'A free-form description", "to an eventual risk:mitigation ('name', ('str', {'strip': True}), { 'doc':", "}), ('name', ('str', {}), { 'doc': 'The name of the", "group.', }), ('software', ('array', {'type': 'it:mitre:attack:software', 'uniq': True, 'sorted': True,", "is present. ''' def postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr) self.setNormFunc(int, self._normPyInt)", "max width of 20 bits. 
The comparable integer value representing", "the thread.', }), ('src:thread', ('it:exec:thread', {}), { 'doc': 'The thread", "( ('user', ('inet:user', {}), { 'doc': 'The username associated with", "( ('host', ('it:host', {}), { 'doc': 'The host containing the", "name: await node.set('software:name', name) async def _onPropSoftverArch(self, node, oldv): #", "{}), { 'doc': 'The file that was created.', }), )),", "\"part\" field from the CPE 2.3 string.'}), ('vendor', ('ou:name', {}),", "('ps:contact', {}), { 'doc': 'The operator of the host.', }),", "{}), { 'doc': 'Version minor number.', }), ('semver:patch', ('int', {}),", "used to annotate nodes included in this ATT&CK technique.', 'ex':", "(80, 'emerg'), ) class ItModule(s_module.CoreModule): async def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr)", "('it:mitre:attack:group', {}, ( ('org', ('ou:org', {}), { 'doc': 'Used to", "process executing code that created the mutex.', }), ('host', ('it:host',", "requested the URL. 
May or may not be the same", "{'enums': loglevels}), { 'doc': 'A log level integer that increases", "the hit.'}), ('src:ipv6', ('inet:ipv6', {}), { 'doc': 'The source IPv6", "the URL was requested.', }), ('url', ('inet:url', {}), { 'doc':", "'Semantic Version type.', }), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, { 'doc': 'A", "(30, 'notice'), (40, 'warning'), (50, 'err'), (60, 'crit'), (70, 'alert'),", "{ 'doc': 'The most recent version of the rule evaluated", "('sensor', ('it:host', {}), { 'doc': 'The sensor host node that", "URLs that document the ATT&CK group.', }), ('techniques', ('array', {'type':", "{ 'doc': 'The Microsoft Windows Security Identifier of the group.',", "runtime.', }), ('it:exec:url', ('guid', {}), { 'doc': 'An instance of", "if the software is an operating system.'}), ('islib', ('bool', {}),", ")), ('it:app:snort:hit', {}, ( ('rule', ('it:app:snort:rule', {}), { 'doc': 'The", "listened for by the app.'}), )), ('it:os:android:ibroadcast', {}, ( ('app',", "}), ('it:reveng:function', ('guid', {}), { 'doc': 'A function inside an", "{'ro': True, 'doc': 'The android permission requested by the app.'}),", "'The path where the file was written to/modified.', }), ('path:dir',", "{ 'doc': 'The time the mutex was created.', }), ('name',", "}), ('model', ('str', {}), { 'doc': 'The product model of", "app.'}), )), ('it:prod:softos', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True,", "'doc': 'The specific file containing code that wrote to the", "'doc': 'The geo-political location string for the node.', }), ('os',", "{}), { 'doc': 'The size of the memory map in", "}), ('perms:write', ('bool', {}), { 'doc': 'True if the mmap", "process, including any command line parameters.', 'disp': {'hint': 'text'}, }),", "address range of this network.', }), )), ('it:account', {}, (", "','}), { 'doc': 'An array of ATT&CK technique IDs used", "ended.', }), ('host', ('it:host', {}), { 'doc': 'The host that", "representing a named pipe.', }), 
('it:dev:mutex', ('str', {}), { 'doc':", "system.'}), ('os', ('it:prod:softver', {}), {'ro': True, 'doc': 'The operating system", "{}, ()), ('it:exec:proc', {}, ( ('host', ('it:host', {}), { 'doc':", "def __init__(self, modl, name, info, opts): opts['lower'] = True s_types.Str.__init__(self,", "True, 'doc': 'The \"version\" field from the CPE 2.3 string.'}),", "process.' }), ('killedby', ('it:exec:proc', {}), { 'doc': 'The process which", "s_version.packVersion(major, minor, patch) subs = {'major': major, 'minor': minor, 'patch':", "instance.', }), ('vers:norm', ('str', {'lower': True}), { 'doc': 'Normalized version", "{ 'doc': 'True if the mmap is mapped with write", "'str'))}), { 'deprecated': True, 'doc': 'A reference to a string", "description of the software.', 'disp': {'hint': 'text'}, }), ('desc:short', ('str',", "was loaded.', }), ('va', ('int', {}), { 'doc': 'The base", "source port of the flow that caused the hit.'}), ('dst',", "{}), { 'doc': 'The IPv6 address specified to bind().' }),", "( ('it:hostname', ('str', {'strip': True, 'lower': True}), { 'doc': 'The", "{}, ( ('salt', ('hex', {}), { 'doc': 'The (optional) hex", "'doc': 'The signature that the file triggered on.' 
}), ('time',", "'A snort rule unique identifier.', }), ('it:app:snort:hit', ('guid', {}), {", "'doc': 'Software associated with this version instance.', }), ('software:name', ('str',", "strings referenced within the function.', }), )), ('it:reveng:filefunc', {}, (", "'A Mitre ATT&CK Software ID.', 'ex': 'S0154', }), ('it:dev:str', ('str',", "of the file.', }), )), ('it:exec:file:add', {}, ( ('proc', ('it:exec:proc',", "file that triggered an alert on a specific antivirus signature.',", "('it:mitre:attack:status', {}), { 'doc': 'The status of this ATT&CK technique.',", "android app which requests the permission.'}), ('perm', ('it:os:android:perm', {}), {'ro':", "}), ('passwd', ('inet:passwd', {}), { 'doc': 'The (optional) clear text", "'sorted': True}), { 'doc': 'A list of CVEs that apply", "{}), { 'doc': 'The time the file was deleted.', }),", "where the group is registered.', }), ('domain', ('it:domain', {}), {", "{ 'doc': 'The client port during the URL retrieval..' }),", "the AV engine detected the signature.' }), )), ('it:auth:passwdhash', {},", "software author.', }), ('author:email', ('inet:email', {}), { 'deprecated': True, 'doc':", "the account.', 'ex': '1001', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc':", "N/A ''' def __init__(self, modl, name, info, opts): opts['lower'] =", "'doc': 'A GUID that represents an account on a host", "{}), { 'doc': 'The contact information of the org or", "'doc': 'The file that triggered the signature hit.', }), ('sig',", "a registry key.', }), ('it:exec:reg:del', ('guid', {}), { 'doc': 'An", "integers, with a max width of 20 bits. The comparable", "which contains the thread.', }), ('created', ('time', {}), { 'doc':", "'doc': 'The IPv4 where the logon originated.', }), ('client:ipv6', ('inet:ipv6',", "( ('app', 'it:prod:soft'), ('perm', 'it:os:android:perm'))}), { 'doc': 'The given software", "host running the process that wrote to the registry. 
Typically", "= valu.strip() if not valu: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No text", "retrieval..' }), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 of", "{ 'doc': 'An instance of a host deleting a file", "('proc', ('it:exec:proc', {}), { 'doc': 'The process which contains the", "info, opts): opts['lower'] = True s_types.Str.__init__(self, modl, name, info, opts)", "('time', {}), { 'doc': 'The time the port was bound.',", "unique identifier.', }), ('it:app:snort:hit', ('guid', {}), { 'doc': 'An instance", "memory address where the map was created in the process.',", "'CPE 2.3 string is expected to start with \"cpe:2.3:\"' raise", "('str', {}), { 'doc': 'A free-form description of the CVE", "('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True, 'split': ','}), {", "the ATT&CK software.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "'The file considered the \"main\" executable for the process. For", "to.', }), )), ('it:exec:reg:del', {}, ( ('proc', ('it:exec:proc', {}), {", "'doc': 'An instance of a password hash.', }), ('it:exec:proc', ('guid',", "if possible prop = node.get('software') if prop: opts = {'vars':", "or bruteforce parts try: valu, subs = self.bruteVersionStr(prop) await node.set('semver',", "{}), { 'doc': 'Set to false to indicate an unsuccessful", "to evaluate if it exhibits interesting behavior.'}), ('complexity', ('int', {}),", "the URL.', }), ('host', ('it:host', {}), { 'doc': 'The host", "s_version logger = logging.getLogger(__name__) class Cpe23Str(s_types.Str): ''' CPE 2.3 Formatted", "the file name (parsed from :path).', }), ('path:base', ('file:base', {}),", "'ex': 'cno.mitre.ta0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), {", "{'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc': 'A URL", "to map an ATT&CK group to a synapse ou:org.', }),", "ID.', 'ex': 'S0154', }), ('it:dev:str', ('str', {}), { 'doc': 'A", "library file that was loaded.', }), 
)), ('it:exec:mmap', {}, (", "0)) return valu, subs async def _onFormItDevStr(self, node): await node.set('norm',", "target_sw : target_hw : other * = \"any\" - =", "await node.set('vers:norm', prop) # Make it:dev:str from version str await", "('it:host', {}), { 'doc': 'The host containing the file.', }),", "the software was released.', }), ('semver', ('it:semver', {}), { 'doc':", "techniques used by the software.', }), )), ('it:mitre:attack:mitigation', {}, (", "('it:dev:str', ('str', {}), { 'doc': 'A developer-selected string.' }), ('it:dev:pipe',", "('file', ('file:bytes', {}), { 'doc': 'The file that was deleted.',", "file is distributed by a specific software version.'}), ('it:prod:softlib', ('comp',", "if c == '\\\\': c += next(genr) if c ==", "that documents the ATT&CK mitigation.', }), ('tag', ('syn:tag', {}), {", "True, 'doc': 'The library software version.'}), )), ('it:prod:softfile', {}, (", "'The virtual address of the first codeblock of the function.'}),", "if name: await node.set('software:name', name) async def _onPropSoftverArch(self, node, oldv):", "{ 'doc': 'The last known location for the host.' }),", "at a particular version.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq':", "}), ('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted': True}), {", "GECOS field for the POSIX account.', }), ('posix:home', ('file:path', {}),", "{}, ( ('host', ('it:host', {}), {'ro': True, 'doc': 'Host with", "{}), {'ro': True, 'doc': 'Software on the host.'}) )), ('it:av:sig',", "'An array of ATT&CK software IDs used by the group.',", "of the org or person who authored the software.', }),", "'doc': 'The specific file containing code that read the registry.", "{ 'doc': 'True if the mmap is mapped with read", "file.'}), ('file', ('file:bytes', {}), { 'ro': True, 'doc': 'The file", "'An instance of a host writing a file to a", "'doc': 'A GUID that represents an individual logon/logoff event.' 
}),", "{ 'doc': 'A GUID that represents a group on a", "('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted': True}), { 'doc':", "('hash:sha256', {}), { 'doc': 'The SHA256 password hash value.', }),", "{}), { 'doc': 'The library file that was loaded.', }),", "{}, ( ('org', ('ou:org', {}), { 'doc': 'Used to map", "specific file containing code that read the registry. May or", "s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a negative integer as a semver.')", "SHA1 password hash value.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc':", "description field.', 'ex': 'Buffer Copy without Checking Size of Input", "a specific software version.'}), ('it:prod:softlib', ('comp', {'fields': ( ('soft', 'it:prod:softver'),", "True, 'split': ','}), { 'doc': 'An array of ChildOf CWE", "{ 'doc': 'The default installation path of the file.'}), )),", "return await node.set('vers:norm', prop) # Make it:dev:str from version str", "'The base memory address where the library was loaded in", "the logon originated.', }), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The", "'doc': 'A arbitrary, unversioned software product.', }), ('it:adid', ('str', {'lower':", "the named pipe was created.', }), ('name', ('it:dev:pipe', {}), {", "}), ('it:app:yara:procmatch', ('guid', {}), { 'doc': 'An instance of a", "or network.' }), ('it:group', ('guid', {}), { 'doc': 'A GUID", "where the file was written to/modified.', }), ('path:dir', ('file:path', {}),", "of authentication and configuration such as a windows domain.' 
}),", "this process.', }), )), ('it:exec:thread', {}, ( ('proc', ('it:exec:proc', {}),", "Security Identifier of the group.', }), )), ('it:logon', {}, (", "('it:app:snort:rule', {}, ( ('text', ('str', {}), { 'doc': 'The snort", "process.', }), ('it:app:snort:rule', ('guid', {}), { 'doc': 'A snort rule", "may not be the same :exe referenced in :proc, if", "{ 'doc': 'The given software broadcasts the given Android intent.'}),", "'The time the process exited.', }), ('exitcode', ('int', {}), {", "org that operates the given host.', }), )), ('it:log:event', {},", "value.', }), ('hash:lm', ('hash:lm', {}), { 'doc': 'The LM password", "hit.'}), ('dst:ipv6', ('inet:ipv6', {}), { 'doc': 'The destination IPv4 address", "process ID.', }), ('time', ('time', {}), { 'doc': 'The start", "from the registry. Typically the same host referenced in :proc,", "created.', }), ('name', ('it:dev:pipe', {}), { 'doc': 'The named pipe", "( ('desc', ('str', {}), { 'doc': 'A free-form description of", "registry key.', }), ('it:exec:reg:set', ('guid', {}), { 'doc': 'An instance", "{ 'doc': 'An array of URLs that document the ATT&CK", "'The host thread which caused the activity.'}), ('host', ('it:host', {}),", "prop = node.get('arch') if prop: await node.snap.addNode('it:dev:str', prop) async def", "'The status of this ATT&CK technique.', }), ('isnow', ('it:mitre:attack:technique', {}),", "specifying this software.', }), ('author', ('ps:contact', {}), { 'doc': 'The", "{ 'doc': 'The last known ipv4 address for the host.'", "'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval', ('guid', {}), { 'doc': 'A Windows", "('it:app:yara:match', {}, ( ('rule', ('it:app:yara:rule', {}), { 'ro': True, 'doc':", "executable which started the process.', }), ('src:proc', ('it:exec:proc', {}), {", "'The mutex string.', }), )), ('it:exec:pipe', {}, ( ('proc', ('it:exec:proc',", "('str', {'strip': True}), { 'doc': 'A description of the ATT&CK", "}), )), 
('it:mitre:attack:group', {}, ( ('org', ('ou:org', {}), { 'doc':", "{ 'doc': 'The time that the AV engine detected the", "if len(parts) != 13: mesg = f'CPE 2.3 string has", "hit.'}), ('time', ('time', {}), { 'doc': 'The time of the", "host.', }), ('ctime', ('time', {}), { 'doc': 'The file creation", "the process that read the file. Typically the same host", "'deprecated': True, 'doc': 'A reference to a string inside a", "of a host deleting a file from a filesystem.', }),", "('it:exec:file:read', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "'doc': 'An android permission string.'}), ('it:os:android:intent', ('str', {}), { 'doc':", "'The optional contiguous IPv4 address range of this network.', }),", "('data', {}), { 'doc': 'A raw JSON record of the", "('hash:sha256', {}), { 'doc': 'A SHA256 hash of the memory", "'sorted': True, 'split': ','}), { 'doc': 'An array of techniques", "'The process which created the process.' }), ('killedby', ('it:exec:proc', {}),", "{ 'doc': 'A file is distributed by a specific software", "{}), { 'doc': 'The username associated with the account', }),", "(optional) hex encoded salt value used to calculate the password", "started.'}), ), }), ), 'forms': ( ('it:hostname', {}, ()), ('it:host',", "'doc': 'The time the process exited.', }), ('exitcode', ('int', {}),", "('str', {'strip': True, 'lower': True}), { 'doc': 'The name of", "}), ('path', ('file:path', {}), { 'doc': 'The path that the", "{}), { 'doc': 'Notes concerning the function.'}), ('impcalls', ('array', {'type':", "('path', ('file:path', {}), { 'doc': 'The default installation path of", "self.bruteVersionStr(prop) await node.set('semver', valu) for k, v in subs.items(): await", "'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc': 'A", "def _onPropSoftverVers(self, node, oldv): # Set vers:norm and make it's", "parts[4], 'version': parts[5], 'update': parts[6], 'edition': parts[7], 'language': parts[8], 'sw_edition':", "'it:prod:soft'), ('intent', 
'it:os:android:intent'))}), { 'doc': 'The given software listens for", "'doc': 'NIST NVD Common Weaknesses Enumeration Specification', 'ex': 'CWE-120', }),", "to parse string as a semver.') valu = s_version.packVersion(subs.get('major'), subs.get('minor'),", "specific file containing code that deleted data from the registry.", "'doc': 'The (optional) hex encoded salt value used to calculate", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"target_hw\"", "'1001', }), ('posix:gecos', ('int', {}), { 'doc': 'The GECOS field", "is registered.', }), ('posix:uid', ('int', {}), { 'doc': 'The user", "'The inet:addr of the server when binding the port.' }),", "NTLM password hash value.', }), ('passwd', ('inet:passwd', {}), { 'doc':", "of the flow that caused the hit.'}), ('dst:port', ('inet:port', {}),", "the semver is the bitwise concatenation of the major, minor", "s_version.fmtVersion(major, minor, patch) return valu loglevels = ( (10, 'debug'),", "'doc': 'The host on which the activity occurred.'}), ('time', ('time',", "string.'}), ('update', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "{'type': 'it:reveng:impfunc'}), { 'doc': 'Calls to imported library functions within", "{}), { 'doc': 'The main process executing code that requested", "that read the file.', }), ('host', ('it:host', {}), { 'doc':", "access time.', }), ('user', ('inet:user', {}), { 'doc': 'The owner", "}), ('vers:norm', ('str', {'lower': True}), { 'doc': 'Normalized version of", "}), )), ('it:hosturl', {}, ( ('host', ('it:host', {}), { 'ro':", "as s_version logger = logging.getLogger(__name__) class Cpe23Str(s_types.Str): ''' CPE 2.3", "path that the library was loaded from.', }), ('file', ('file:bytes',", "the version out of a string. 
Args: valu (str): String", "{ 'doc': 'The org that owns/operates the network.', }), ('net4',", "except Exception: logger.exception('Failed to brute force version string [%s]', prop)", "}), ('it:auth:passwdhash', ('guid', {}), { 'doc': 'An instance of a", "('it:domain', {}), { 'doc': 'The authentication domain where the group", "('time', {}), { 'doc': 'The time the data from the", "the software.'}), ('softver', ('it:prod:softver', {}), {'ro': True, 'doc': 'Software on", "'The destination IPv4 address of the flow that caused the", "'doc': 'The host where the logon originated.', }), ('client:ipv4', ('inet:ipv4',", "('inet:url', {}), { 'doc': 'URL where a specific version of", "The system normalized version integer and a subs dictionary. '''", "of the sofware author.', }), ('author:person', ('ps:person', {}), { 'deprecated':", "('inet:ipv6', {}), { 'doc': 'The IPv6 of the client during", "'ro': True, 'doc': 'The signature name.' }), ('desc', ('str', {}),", "('rule', ('it:app:yara:rule', {}), { 'doc': 'The YARA rule that matched", "to a file.', }), ('it:app:yara:procmatch', ('guid', {}), { 'doc': 'An", "'doc': 'A description of the ATT&CK tactic.', 'disp': {'hint': 'text'},", "'An array of URLs that document the ATT&CK tactic.', }),", "{ 'deprecated': True, 'doc': 'Organization which authored the software.', }),", "value Major, minor and patch levels are represented as integers,", "prop) async def _onPropSoftverVers(self, node, oldv): # Set vers:norm and", "main process executing code that wrote to / modified the", "{}), { 'doc': 'The time that the activity started.'}), ),", "node.snap.addNode('it:dev:str', prop) async def _onPropSoftverVers(self, node, oldv): # Set vers:norm", "with the software.'}), ('softver', ('it:prod:softver', {}), {'ro': True, 'doc': 'Software", "True s_types.Str.__init__(self, modl, name, info, opts) def _splitCpe23(self, text): part", "{ 'doc': 'A log level integer that increases with severity.',", "'An array of ChildOf CWE Relationships.' 
}), )), ('it:mitre:attack:group', {},", "'The file that triggered the signature hit.', }), ('sig', ('it:av:sig',", "{ 'doc': 'A free-form description of the host.', }), ('domain',", "of the account.', }), ('groups', ('array', {'type': 'it:group'}), { 'doc':", "('hash:md5', {}), { 'doc': 'The MD5 password hash value.', }),", "minor and patch levels. Prerelease and build information will be", "CPE 2.3 string.'}), ('language', ('str', {'lower': True, 'strip': True}), {", "'Semver build string.', }), ('url', ('inet:url', {}), { 'doc': 'URL", "file was created.', }), ('path', ('file:path', {}), { 'doc': 'The", "('it:os:android:perm', ('str', {}), { 'doc': 'An android permission string.'}), ('it:os:android:intent',", "{ 'doc': 'An instance of a process triggering an alert", "}), ('server:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 address specified", "raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable to brute force version parts out", "r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc': 'A Microsoft Windows Security Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555',", "IPv4 of the client during the URL retrieval..' 
}), ('client:ipv6',", "for by the app.'}), )), ('it:os:android:ibroadcast', {}, ( ('app', ('it:prod:softver',", "technique on this sub-technique.', }), ('tactics', ('array', {'type': 'it:mitre:attack:tactic', 'uniq':", "a process triggering an alert on a specific antivirus signature.'", "True, 'sorted': True, 'split': ','}), { 'doc': 'An array of", "{}), { 'doc': 'The CWE description field.', 'disp': {'hint': 'text'},", "software is available from.', }), )), ('it:prod:softlib', {}, ( ('soft',", "'The registry key or value that was read.', }), )),", "{ 'doc': 'The time the logon session ended.', }), ('host',", "ID.', 'ex': 'T1548', }), ('it:mitre:attack:mitigation', ('str', {'regex': r'^M[0-9]{4}$'}), { 'doc':", "('guid', {}), { 'doc': 'An instance of a host requesting", "('hex', {}), { 'doc': 'The (optional) hex encoded salt value", "the mmap is a mapped view of a file.', }),", "valu (str): String to attempt to get version information for.", "'doc': 'The host running the process that created the named", "}), ('logoff:time', ('time', {}), { 'doc': 'The time the logon", "run on.'}), )), ('it:os:android:ilisten', {}, ( ('app', ('it:prod:softver', {}), {'ro':", "opts): opts['lower'] = True s_types.Str.__init__(self, modl, name, info, opts) def", "mapped with write permissions.', }), ('perms:execute', ('bool', {}), { 'doc':", "'doc': 'The time that the activity started.'}), ), }), ),", "'The owner of the file.', }), ('group', ('inet:user', {}), {", "a group on a host or network.' 
}), ('it:logon', ('guid',", "of a file.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc': 'A", "'The operating system which the software can run on.'}), )),", "in this ATT&CK tactic.', 'ex': 'cno.mitre.ta0100', }), ('references', ('array', {'type':", "time of match.'}), )), ('it:app:yara:rule', {}, ( ('text', ('str', {}),", "'The Windows registry key.', }), ('str', ('it:dev:str', {}), { 'doc':", "'The file that was modified.', }), )), ('it:exec:reg:get', {}, (", "the first codeblock of the function.'}), ('rank', ('int', {}), {", "('str', {}), { 'doc': 'The CWE description field.', 'disp': {'hint':", "}), ('url', ('inet:url', {}), { 'doc': 'URL where a specific", "('str', {}), { 'ro': True, 'doc': 'The string that the", "}), ('software', ('array', {'type': 'it:mitre:attack:software', 'uniq': True, 'sorted': True, 'split':", "'The time the mutex was created.', }), ('name', ('it:dev:mutex', {}),", "('file:path', {}), { 'doc': 'The path for the file.', }),", "{}), { 'ro': True, 'doc': 'The file that matched the", "await node.set(f'semver:{k}', v) except asyncio.CancelledError: # pragma: no cover raise", "YARA rule that matched the file.'}), ('proc', ('it:exec:proc', {}), {", "('it:cmd', ('str', {'strip': True}), { 'doc': 'A unique command-line string.',", "2.3 Formatted String', }), ), 'types': ( ('it:hostname', ('str', {'strip':", "edition : language : sw_edition : target_sw : target_hw :", "}), )), ('it:mitre:attack:technique', {}, ( ('name', ('str', {'strip': True}), {", "map. 
Bytes may optionally be present in the axon.', }),", "using the it:semver normalization before attempting to extract version parts", "included in this ATT&CK technique.', 'ex': 'cno.mitre.t0100', }), ('references', ('array',", "TODO map to an eventual risk:mitigation ('name', ('str', {'strip': True}),", "{ 'doc': 'The time the file was written to/modified.', }),", "'An array of ATT&CK technique IDs used by the group.',", "( ('it:host:activity', { 'props': ( ('exe', ('file:bytes', {}), { 'doc':", "group.', }), ('names', ('array', {'type': 'ou:name', 'uniq': True, 'sorted': True}),", "'doc': 'The NTLM password hash value.', }), ('passwd', ('inet:passwd', {}),", "}), )), ('it:prod:softlib', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True,", "modl = { 'ctors': ( ('it:semver', 'synapse.models.infotech.SemVer', {}, { 'doc':", "group.', }), ('desc', ('str', {}), { 'doc': 'A brief description", "system of the host.' }), ('manu', ('str', {}), { 'doc':", "POSIX account's home directory.\", 'ex': '/home/visi', }), ('posix:shell', ('file:path', {}),", "URL retrieval.' }), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4", "CWE description field.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "that represents an account on a host or network.' 
}),", "main process executing code that deleted the file.', }), ('host',", "used to annotate nodes included in this ATT&CK tactic.', 'ex':", "version of a software product.'}), ('it:prod:softfile', ('comp', {'fields': ( ('soft',", "{ 'doc': 'The named pipe string.', }), )), ('it:exec:url', {},", "'The guid matching the function.'}), ('file', ('file:bytes', {}), { 'ro':", "('it:exec:file:read', ('guid', {}), { 'doc': 'An instance of a host", "('host', ('it:host', {}), { 'doc': 'The host running the process", "'ro': True, 'doc': 'Host serving a url.', }), ('url', ('inet:url',", "'A Microsoft Windows Security Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa', ('it:adid',", "library was loaded.', }), ('va', ('int', {}), { 'doc': 'The", "('file:path', {}), { 'doc': 'The path to the executable which", "{ 'doc': 'Notes concerning the function.'}), ('impcalls', ('array', {'type': 'it:reveng:impfunc'}),", "'The guid matching the function.'}), ('string', ('str', {}), { 'ro':", "network.', }), ('net4', ('inet:net4', {}), { 'doc': 'The optional contiguous", "host.', }), ('it:exec:thread', ('guid', {}), { 'doc': 'A thread executing", "activity started.'}), ), }), ), 'forms': ( ('it:hostname', {}, ()),", "node.get('vers') if not prop: return await node.set('vers:norm', prop) # Make", "parent directory of the file path (parsed from :path).', }),", "('time', {}), { 'doc': 'The time the registry was read.',", "('it:prod:softver', {}, ( ('software', ('it:prod:soft', {}), { 'doc': 'Software associated", "'The source IPv6 address of the flow that caused the", "('server:port', ('inet:port', {}), { 'doc': 'The bound (listening) TCP port.'", "information is present. 
''' def postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr) self.setNormFunc(int,", "rule hit.', }), ('it:reveng:function', ('guid', {}), { 'doc': 'A function", "logon originated.', }), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6", "triggered an alert on a specific antivirus signature.', }), ('it:av:prochit',", "extension of the file name (parsed from :path).', }), ('path:base',", "('time', {}), { 'doc': 'The file creation time.', }), ('mtime',", "address of the flow that caused the hit.'}), ('src:port', ('inet:port',", "13.' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) subs = { 'part': parts[2], 'vendor':", "'Normalized version of the version string.', }), ('arch', ('it:dev:str', {}),", "('os', 'it:prod:softver'))}), { 'doc': 'The software version is known to", "Windows registry key.', }), ('str', ('it:dev:str', {}), { 'doc': 'The", "on a given host.', }), ('it:av:sig', ('comp', {'fields': (('soft', 'it:prod:soft'),", "to/modified.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc': 'The", "the YARA rule.'}), ('version', ('it:semver', {}), { 'doc': 'The current", "'doc': 'URL available on the host.', }), )), ('it:dev:str', {},", "('file:path', {}), { 'ro': True, 'doc': 'The parent directory of", "('str', {}), { 'doc': 'A developer-selected string.' }), ('it:dev:pipe', ('str',", "account of the software author.', }), ('author:email', ('inet:email', {}), {", "mutex.', }), ('it:dev:int', ('int', {}), { 'doc': 'A developer selected", "'The registry key or value that was deleted.', }), )),", "hit.', }), ('it:reveng:function', ('guid', {}), { 'doc': 'A function inside", "recent version of the rule evaluated as a match.'}), )),", "{}), { 'doc': 'A file on a host.' }), ('it:exec:file:add',", "''' Brute force the version out of a string. 
Args:", "URL linking this CVE to a full description.', }), ('references',", "that deleted the file.', }), ('host', ('it:host', {}), { 'doc':", "{}), { 'doc': 'An android intent string.'}), ('it:os:android:reqperm', ('comp', {'fields':", "}), ('it:reveng:filefunc', ('comp', {'fields': (('file', 'file:bytes'), ('function', 'it:reveng:function'))}), { 'doc':", "path where the file was read.', }), ('path:dir', ('file:path', {}),", "'doc': 'The org that operates the given domain.', }), )),", "('path', ('file:path', {}), { 'doc': 'The path for the file.',", "('key', ('it:dev:regkey', {}), { 'doc': 'The Windows registry key.', }),", "('software', ('array', {'type': 'it:mitre:attack:software', 'uniq': True, 'sorted': True, 'split': ','}),", "'The specific file containing code that read the registry. May", "a semver.') if valu > s_version.mask60: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot", "('target_hw', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "True}), { 'doc': 'A description of the ATT&CK mitigation.', 'disp':", "account.', }), ('groups', ('array', {'type': 'it:group'}), { 'doc': 'An array", "{}), { 'doc': 'Used to map an ATT&CK software to", "modified.', }), )), ('it:exec:reg:get', {}, ( ('proc', ('it:exec:proc', {}), {", "to the file. 
May or may not be the same", "of a host writing a file to a filesystem.', }),", "ItModule(s_module.CoreModule): async def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers)", "'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa', ('it:adid', {}), { 'doc': 'An iOS", "is a string.', }), ('int', ('it:dev:int', {}), { 'doc': 'The", "True, 'doc': 'The string that the function references.'}), )), ('it:reveng:impfunc',", "start time for the process.', }), ('exited', ('time', {}), {", "('org', ('ou:org', {}), { 'doc': 'The org that owns/operates the", "signature name.' }), ('desc', ('str', {}), { 'doc': 'A free-form", "('domain', ('it:domain', {}), { 'doc': 'The authentication domain where the", "('guid', {}), { 'doc': 'A named pipe created by a", "}), ('name', ('it:dev:mutex', {}), { 'doc': 'The mutex string.', }),", "the process exited.', }), ('exitcode', ('int', {}), { 'doc': 'The", "wrote to the registry. Typically the same host referenced in", "'ro': True, 'doc': 'The guid matching the function.'}), ('string', ('str',", "the group is registered.', }), ('groups', ('array', {'type': 'it:group'}), {", "('guid', {}), { 'doc': 'A GUID that represents a host", "its component parts. This normalizes a version string into an", "during the URL retrieval..' }), ('client:ipv6', ('inet:ipv6', {}), { 'doc':", "This first attempts to parse strings using the it:semver normalization", "'The host running the process that wrote to the registry.", "Try doing version part extraction by noming through the string", "the host.' 
}), ('manu', ('str', {}), { 'doc': 'The manufacturer", "containing code that wrote to the registry. May or may", "executable file which caused the activity.'}), ('proc', ('it:exec:proc', {}), {", "that created the named pipe. May or may not be", "description of the group.', }), ('host', ('it:host', {}), { 'doc':", "requested the URL. Typically the same host referenced in :proc,", "'doc': 'The host running the process that read the registry.", "}), ('manu', ('str', {}), { 'doc': 'The manufacturer of the", "a host binding a listening port.', }), ('it:fs:file', ('guid', {}),", "include this technique.', }), )), ('it:mitre:attack:software', {}, ( ('software', ('it:prod:soft',", "('file:path', {}), { 'doc': \"The path to the POSIX account's", "group ID.', 'ex': 'cno.mitre.g0100', }), ('references', ('array', {'type': 'inet:url', 'uniq':", "in to.', }), ('account', ('it:account', {}), { 'doc': 'The account", "the host.', }), )), ('it:dev:str', {}, ( ('norm', ('str', {'lower':", "authored the software.', }), ('url', ('inet:url', {}), { 'doc': 'URL", "'The anti-virus product which contains the signature.', }), )), ('it:av:prochit',", "this password hash.', }), )), ('it:cmd', {}, ()), ('it:exec:proc', {},", "{ 'doc': 'The path to the executable of the process.',", "domain.', }), ('desc', ('str', {}), { 'doc': 'A brief description", "ATT&CK technique.', }), ('parent', ('it:mitre:attack:technique', {}), { 'doc': 'The parent", "('it:sec:cpe', {}), { 'doc': 'The NIST CPE 2.3 string specifying", "ATT&CK mitigation.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "{}), { 'doc': 'The file access time.', }), ('user', ('inet:user',", "available as strings if that information is present. ''' def", "running the process that created the named pipe. 
Typically the", "('it:exec:pipe', ('guid', {}), { 'doc': 'A named pipe created by", "{ 'doc': 'A file that triggered an alert on a", "CPE 2.3 string.'}), ('vendor', ('ou:name', {}), { 'ro': True, 'doc':", "version of the rule.'}), )), ('it:app:snort:hit', {}, ( ('rule', ('it:app:snort:rule',", "'doc': 'The NIST CPE 2.3 string specifying this software version',", "the it:dev:str.', }), )), ('it:sec:cve', {}, ( ('desc', ('str', {}),", "'doc': 'The manufacturer of the host.', }), ('model', ('str', {}),", "that the AV engine detected the signature.' }), )), ('it:auth:passwdhash',", "the memory was mapped.', }), ('va', ('int', {}), { 'doc':", "2.3 string.'}), ('edition', ('str', {'lower': True, 'strip': True}), { 'ro':", ":exe specified in :proc, if present.'}), ('time', ('time', {}), {", "{}), { 'doc': 'The path that the library was loaded", "pipe string.', }), )), ('it:exec:url', {}, ( ('proc', ('it:exec:proc', {}),", "written to/modified.', }), ('path', ('file:path', {}), { 'doc': 'The path", "('it:prod:soft', {}), { 'doc': 'Software associated with this version instance.',", "integer.', }), ('bytes', ('file:bytes', {}), { 'doc': 'The file representing", "'split': ','}), { 'doc': 'An array of ChildOf CWE Relationships.'", "was released.', }), ('semver', ('it:semver', {}), { 'doc': 'System normalized", "from.', }), ('file', ('file:bytes', {}), { 'doc': 'The library file", "thread executing in a process.', }), ('it:exec:loadlib', ('guid', {}), {", "where the library was loaded.', }), ('va', ('int', {}), {", "}), ('server:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 address specified", "the function references.'}), )), ('it:reveng:impfunc', {}, ()), ), } name", "('soft', 'it:prod:softver'), ('file', 'file:bytes'))}), { 'doc': 'A file is distributed", "value.', }), ('hash:sha1', ('hash:sha1', {}), { 'doc': 'The SHA1 password", "may contain the current value for the technique.', }), ('desc',", "names for this software.', }), ('desc', ('str', {}), { 'doc':", "used by the 
group.', }), ('software', ('array', {'type': 'it:mitre:attack:software', 'uniq':", "'An array of URLs that document the ATT&CK group.', }),", "{ 'doc': 'The time the file was read.', }), ('path',", "}), ('ipv4', ('inet:ipv4', {}), { 'doc': 'The last known ipv4", "software.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse tag", "'doc': 'The path to the executable of the process.', }),", "operator of the host.', }), ('org', ('ou:org', {}), { 'doc':", "('file:bytes', {}), {'ro': True, 'doc': 'The file distributed by the", "raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No text left after stripping whitespace') subs", "file on a host.' }), ('it:exec:file:add', ('guid', {}), { 'doc':", "as s_module import synapse.lib.version as s_version logger = logging.getLogger(__name__) class", "2.3 string.'}), ('version', ('str', {'lower': True, 'strip': True}), { 'ro':", "thread which caused the activity.'}), ('host', ('it:host', {}), { 'doc':", "rule.'}), ('enabled', ('bool', {}), { 'doc': 'The rule enabled status", "mmap is mapped with execute permissions.', }), ('created', ('time', {}),", "('int', {}), { 'doc': 'The primary group ID of the", "}), ('semver', ('it:semver', {}), { 'doc': 'System normalized semantic version", "software was released.', }), ('semver', ('it:semver', {}), { 'doc': 'System", "or may not be the same :exe specified in :proc,", "path (parsed from :path).', }), ('path:ext', ('str', {'lower': True, 'strip':", "file containing code that wrote to the registry. 
May or", "{ 'doc': 'The base memory address where the map was", "memory address where the library was loaded in the process.',", "'The main process executing code that wrote to / modified", "which started the process.', }), ('src:proc', ('it:exec:proc', {}), { 'doc':", "('guid', {}), { 'doc': 'A function inside an executable.', }),", "map was deleted.', }), ('path', ('file:path', {}), { 'doc': 'The", "(('function', 'it:reveng:function'), ('string', 'str'))}), { 'deprecated': True, 'doc': 'A reference", "that was loaded.', }), )), ('it:exec:mmap', {}, ( ('proc', ('it:exec:proc',", "a max width of 20 bits. The comparable integer value", "('file', ('file:bytes', {}), { 'doc': 'The file that was read.',", "}), ('it:exec:file:add', ('guid', {}), { 'doc': 'An instance of a", "that document the ATT&CK group.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique',", "'doc': 'The path where the file was read.', }), ('path:dir',", "'ro': True, 'doc': 'The \"other\" field from the CPE 2.3", "value.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc': 'The SHA256 password", "('it:prod:soft', {}), { 'ro': True, 'doc': 'The anti-virus product which", "the function.'}), ('description', ('str', {}), { 'doc': 'Notes concerning the", "instance of a password hash.', }), ('it:exec:proc', ('guid', {}), {", "('array', {'type': 'it:mitre:attack:software', 'uniq': True, 'sorted': True, 'split': ','}), {", "{ 'doc': 'A signature name within the namespace of an", "}), ('reg', ('it:dev:regval', {}), { 'doc': 'The registry key or", "'doc': 'The YARA rule text.', 'disp': {'hint': 'text'}, }), ('name',", "'ex': 'cno.mitre.m0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), {", "( ('proc', ('it:exec:proc', {}), { 'doc': 'The file that triggered", "available and set it if possible prop = node.get('software') if", "logged in to.', }), ('account', ('it:account', {}), { 'doc': 'The", "'ex': 'cno.mitre.g0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': 
True}), {", "software.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", "('time', {}), { 'doc': 'The time the file was deleted.',", "an android intent.'}), ('it:os:android:ibroadcast', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent',", "geo-political location string for the node.', }), ('os', ('it:prod:softver', {}),", "contains the library.'}), ('lib', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "product.'}), ('it:prod:softfile', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('file', 'file:bytes'))}), {", "valu > s_version.mask60: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a integer", "{}), { 'doc': 'A library load event in a process.',", "('str', {}), { 'doc': 'The product model of the host.',", "}), ('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}), { 'doc': 'A Mitre ATT&CK", "time of the network flow that caused the hit.'}), ('sensor',", "string has {len(parts)} parts, expected 13.' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) subs", "'doc': 'The URL that documents the ATT&CK software.', }), ('tag',", "'text'}, }), ('url', ('inet:url', {}), { 'doc': 'A reference URL", "\"sw_edition\" field from the CPE 2.3 string.'}), ('target_sw', ('str', {'lower':", "'ex': '1001', }), ('posix:gecos', ('int', {}), { 'doc': 'The GECOS", "{ 'doc': 'A string representing a named pipe.', }), ('it:dev:mutex',", "'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc': 'The", "('it:prod:softver', {}), {'ro': True, 'doc': 'The android app which requests", "POSIX account.', }), ('posix:home', ('file:path', {}), { 'doc': \"The path", "{}), { 'doc': 'Version major number.', }), ('semver:minor', ('int', {}),", "hash value.', }), ('hash:sha1', ('hash:sha1', {}), { 'doc': 'The SHA1", "('guid', {}), { 'doc': 'A process executing on a host.", "('net6', ('inet:net6', {}), { 'doc': 'The optional contiguous IPv6 address", "'Software on the host.'}) )), ('it:av:sig', {}, ( ('soft', 
('it:prod:soft',", "('str', {}), { 'doc': 'The manufacturer of the host.', }),", "(20, 'info'), (30, 'notice'), (40, 'warning'), (50, 'err'), (60, 'crit'),", "was read.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc':", "('int', {}), { 'doc': 'The function rank score used to", "patch) return valu loglevels = ( (10, 'debug'), (20, 'info'),", "containing code that read the file. May or may not", "home directory.\", 'ex': '/home/visi', }), ('posix:shell', ('file:path', {}), { 'doc':", "android intent which is listened for by the app.'}), )),", "the function.'}), ('va', ('int', {}), { 'doc': 'The virtual address", "CWE description field.', 'ex': 'Buffer Copy without Checking Size of", "{}), { 'doc': 'The main process executing code that read", "code or return value for the thread.', }), ('src:proc', ('it:exec:proc',", "('it:host:activity', { 'props': ( ('exe', ('file:bytes', {}), { 'doc': 'The", "mesg='Cannot norm a negative integer as a semver.') if valu", "process at runtime.', }), ('it:exec:pipe', ('guid', {}), { 'doc': 'A", "field for the POSIX account.', }), ('posix:home', ('file:path', {}), {", "name for the ATT&CK group.', }), ('names', ('array', {'type': 'ou:name',", "created the thread.', }), ('src:thread', ('it:exec:thread', {}), { 'doc': 'The", "that requested the URL. 
May or may not be the", "{}), { 'doc': 'A developer selected integer constant.', }), ('it:dev:regkey',", "the account.', 'ex': '1001', }), ('posix:gid', ('int', {}), { 'doc':", "the CPE 2.3 string.'}), ('sw_edition', ('str', {'lower': True, 'strip': True}),", "information will be parsed out and available as strings if", "was created.', }), ('exited', ('time', {}), { 'doc': 'The time", "'doc': 'The destination address of the trigger.'}), ('dst:ipv4', ('inet:ipv4', {}),", "'doc': 'A unique command-line string.', 'ex': 'foo.exe --dostuff bar', }),", "('deleted', ('time', {}), { 'doc': 'The time the memory map", "{}), { 'doc': 'A string representing a named pipe.', }),", "}), ('desc', ('str', {'strip': True}), { 'doc': 'A description of", "of a host reading a file from a filesystem.', }),", "hex encoded salt value used to calculate the password hash.',", "{ 'doc': 'Contact info for the author of the YARA", "the rule.'}), )), ('it:app:snort:hit', {}, ( ('rule', ('it:app:snort:rule', {}), {", "{}), { 'doc': 'The process which contains the thread.', }),", "unloaded.', }), ('path', ('file:path', {}), { 'doc': 'The path that", "the host.', }), ('domain', ('it:domain', {}), { 'doc': 'The authentication", "the listening port.', }), ('host', ('it:host', {}), { 'doc': 'The", "{ 'ro': True, 'doc': 'The signature name.', }), ('sig:soft', ('it:prod:soft',", "was read.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The registry", "URL for information about the signature.', }) )), ('it:av:filehit', {},", "prop: opts = {'vars': {'soft': prop}} nodes = await node.snap.nodes('it:prod:soft=$soft',", "{'type': 'it:dev:str', 'uniq': True, 'sorted': True}), { 'doc': 'Observed/variant names", "is known to be compatible with the given os software", "requested.', }), ('client', ('inet:client', {}), { 'doc': 'The address of", "( ('name', ('it:hostname', {}), { 'doc': 'The name of the", "permission.'}), ('perm', ('it:os:android:perm', {}), {'ro': True, 'doc': 'The android permission", 
"subs.get('minor'), subs.get('patch')) return valu, {'subs': subs} def _normPyInt(self, valu): if", "{}), { 'doc': 'Semver build string.', }), ('url', ('inet:url', {}),", "'ro': True, 'doc': 'The file extension of the file name", "parts def _normPyStr(self, valu): if not valu.startswith('cpe:2.3:'): mesg = 'CPE", "{ 'doc': \"The path to the POSIX account's default shell.\",", "written to.', }), )), ('it:exec:reg:del', {}, ( ('proc', ('it:exec:proc', {}),", "\"product\" field from the CPE 2.3 string.'}), ('version', ('str', {'lower':", "('it:adid', {}), { 'doc': 'An iOS advertising identification string.'}), ('it:os:android:aaid',", "'doc': 'The software version is known to be compatible with", "{}), { 'doc': 'The group owner of the file.', }),", "of the network.', }), ('desc', ('str', {}), { 'doc': 'A", "range of this network.', }), )), ('it:account', {}, ( ('user',", "('guid', {}), { 'doc': 'A snort rule unique identifier.', }),", "'doc': 'The \"version\" field from the CPE 2.3 string.'}), ('update',", "(('host', 'it:host'), ('softver', 'it:prod:softver'))}), { 'doc': 'A version of a", "('it:semver', {}), { 'doc': 'The current version of the rule.'}),", "function from an imported library.', }), ), 'interfaces': ( ('it:host:activity',", "('posix:shell', ('file:path', {}), { 'doc': \"The path to the POSIX", "}), ('cves', ('array', {'type': 'it:sec:cve', 'uniq': True, 'sorted': True}), {", "('isos', ('bool', {}), { 'doc': 'Set to True if the", "considered the \"main\" executable for DLLs loaded by that program.',", "the signature.', }), )), ('it:av:prochit', {}, ( ('proc', ('it:exec:proc', {}),", "}), ('posix:gecos', ('int', {}), { 'doc': 'The GECOS field for", "used to evaluate if it exhibits interesting behavior.'}), ('complexity', ('int',", "('killedby', ('it:exec:proc', {}), { 'doc': 'The process which killed this", "{ 'doc': 'An array of URLs that document the CVE", "True, 'doc': 'The \"product\" field from the CPE 2.3 string.'}),", "'doc': 'The SHA512 password 
hash value.', }), ('hash:lm', ('hash:lm', {}),", "{}), { 'doc': 'A process executing on a host. May", "{ 'doc': 'A reference URL for information about the signature.',", "}), ('src:thread', ('it:exec:thread', {}), { 'doc': 'The thread which created", "('exitcode', ('int', {}), { 'doc': 'The exit code or return", "a host.' }), ('it:exec:file:add', ('guid', {}), { 'doc': 'An instance", "occured.', }), ('success', ('bool', {}), { 'doc': 'Set to false", "('str', {'lower': 1}), { 'doc': 'A function from an imported", "{'regex': r'^S[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Software ID.', 'ex':", "s_version.mask60: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a integer larger than", "len(parts) != 13: mesg = f'CPE 2.3 string has {len(parts)}", "be used for YARA evaluation engines.'}), )), ('it:app:yara:match', {}, (", "{}), { 'doc': 'The host that the account logged in", "('array', {'type': 'it:reveng:filefunc'}), { 'doc': 'Other function calls within the", "code that deleted data from the registry. May or may", "('guid', {}), { 'doc': 'An instance of a host getting", "function.'}), ('description', ('str', {}), { 'doc': 'Notes concerning the function.'}),", "{}), { 'doc': 'A SHA256 hash of the memory map.", "), 'forms': ( ('it:hostname', {}, ()), ('it:host', {}, ( ('name',", "True, 'onespace': True}), { 'doc': 'The name of the network.',", "'A brief description of the network.', }), ('org', ('ou:org', {}),", "host running the process that read the registry. Typically the", "True, 'split': ','}), { 'doc': 'An array of ATT&CK technique", "'A Mitre ATT&CK Tactic ID.', 'ex': 'TA0040', }), ('it:mitre:attack:technique', ('str',", "}), ('domain', ('it:domain', {}), { 'doc': 'The authentication domain that", "engine name.' 
}), ('it:av:filehit', ('comp', {'fields': (('file', 'file:bytes'), ('sig', 'it:av:sig'))}),", "True, 'doc': 'The software version that contains the library.'}), ('lib',", "('app', 'it:prod:soft'), ('intent', 'it:os:android:intent') )}), { 'doc': 'The given software", "LM password hash value.', }), ('hash:ntlm', ('hash:ntlm', {}), { 'doc':", "read the file.', }), ('host', ('it:host', {}), { 'doc': 'The", "('it:exec:proc', {}), { 'doc': 'An external process which created the", "software which distributes the file.'}), ('file', ('file:bytes', {}), {'ro': True,", "port of the flow that caused the hit.'}), ('dst', ('inet:addr',", "is distributed by a specific software version.'}), ('it:prod:softlib', ('comp', {'fields':", "{ 'doc': 'A mutex created by a process at runtime.',", "or setting a registry key.', }), ('it:exec:reg:del', ('guid', {}), {", "string inside a function.', }), ('it:reveng:impfunc', ('str', {'lower': 1}), {", "code that deleted the file. May or may not be", "for the android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro': True, 'doc':", "host requesting a URL.', }), ('it:exec:bind', ('guid', {}), { 'doc':", "('it:exec:mmap', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The process", "hit.'}), ('src:ipv4', ('inet:ipv4', {}), { 'doc': 'The source IPv4 address", "c == ':': parts.append(part) part = '' continue part +=", "{'ro': True, 'doc': 'The software version that contains the library.'}),", "the process that deleted data from the registry. 
Typically the", "'/home/visi', }), ('posix:shell', ('file:path', {}), { 'doc': \"The path to", "await node.snap.nodes('it:prod:soft=$soft', opts=opts) if nodes: name = nodes[0].get('name') if name:", "}), ('src:exe', ('file:path', {}), { 'doc': 'The path to the", "('soft', 'it:prod:softver'), ('lib', 'it:prod:softver'))}), { 'doc': 'A software version contains", "{}), { 'doc': 'A GUID that represents an account on", "{ 'doc': 'The destination address of the trigger.'}), ('dst:ipv4', ('inet:ipv4',", "process that read the file. Typically the same host referenced", "'The name of the function.'}), ('description', ('str', {}), { 'doc':", ")), ('it:exec:reg:del', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "group.', }), ('host', ('it:host', {}), { 'doc': 'The host where", "number.', }), ('semver:patch', ('int', {}), { 'doc': 'Version patch number.',", "{}), { 'doc': 'The path where the file was deleted.',", "parts[10], 'target_hw': parts[11], 'other': parts[12], } return ':'.join(parts), {'subs': subs}", "( ('file', ('file:bytes', {}), { 'ro': True, 'doc': 'The file", "'The URL that documents the ATT&CK mitigation.', }), ('tag', ('syn:tag',", "the URL retrieval..' }), ('client:port', ('inet:port', {}), { 'doc': 'The", "('place', ('geo:place', {}), { 'doc': 'The place where the host", "the registry key, if the value is binary data.', }),", "network.' }), ('it:logon', ('guid', {}), { 'doc': 'A GUID that", "information for. 
Notes: This first attempts to parse strings using", "('it:hostsoft', {}, ( ('host', ('it:host', {}), {'ro': True, 'doc': 'Host", "{ 'doc': 'NIST NVD Common Weaknesses Enumeration Specification', 'ex': 'CWE-120',", "with read permissions.', }), ('perms:write', ('bool', {}), { 'doc': 'True", "True, 'doc': 'The app software which listens for the android", "('soft', ('it:prod:soft', {}), { 'ro': True, 'doc': 'The anti-virus product", "True}), { 'ro': True, 'doc': 'The file extension of the", "mutex was created.', }), ('name', ('it:dev:mutex', {}), { 'doc': 'The", "'doc': 'The default installation path of the file.'}), )), ('it:hostsoft',", "host.' }), ('place', ('geo:place', {}), { 'doc': 'The place where", "android permission string.'}), ('it:os:android:intent', ('str', {}), { 'doc': 'An android", "document the CVE ID.', }), )), ('it:sec:cpe', {}, ( ('part',", "value used to calculate the password hash.', }), ('hash:md5', ('hash:md5',", "user name of the process owner.', }), ('path', ('file:path', {}),", "= ( (10, 'debug'), (20, 'info'), (30, 'notice'), (40, 'warning'),", "logger = logging.getLogger(__name__) class Cpe23Str(s_types.Str): ''' CPE 2.3 Formatted String", "('proc', ('it:exec:proc', {}), { 'doc': 'The host process which caused", "('it:prod:softver', {}), {'ro': True, 'doc': 'The app software which broadcasts", "the scope of the function.', }), )), ('it:reveng:funcstr', {}, (", "{}, ( ('desc', ('str', {}), { 'doc': 'A free-form description", "s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No text left after stripping whitespace') subs =", "on a host or network.' 
}), ('it:group', ('guid', {}), {", "'A YARA rule match to a file.', }), ('it:app:yara:procmatch', ('guid',", "of a YARA rule match to a process.', }), ('it:app:snort:rule',", "{ 'doc': 'The username associated with the account', }), ('contact',", "library.', }), ), 'interfaces': ( ('it:host:activity', { 'props': ( ('exe',", "a process at runtime.', }), ('it:exec:pipe', ('guid', {}), { 'doc':", "the activity started.'}), ), }), ), 'forms': ( ('it:hostname', {},", "('syn:tag', {}), { 'doc': 'The synapse tag used to annotate", "{ 'doc': 'The SHA1 password hash value.', }), ('hash:sha256', ('hash:sha256',", "('str', {}), { 'doc': 'The log messsage text.', }), ('severity',", "software version', }), ('cves', ('array', {'type': 'it:sec:cve', 'uniq': True, 'sorted':", "('it:adid', {}), { 'doc': 'An android advertising identification string.'}), ('it:os:android:perm',", "from a filesystem.', }), ('it:exec:file:read', ('guid', {}), { 'doc': 'An", "the thread.', }), ('created', ('time', {}), { 'doc': 'The time", "salt value used to calculate the password hash.', }), ('hash:md5',", "client during the URL retrieval.' }), ('client:ipv4', ('inet:ipv4', {}), {", "{ 'doc': 'The org that operates the given domain.', }),", "opts=opts) if nodes: name = nodes[0].get('name') if name: await node.set('software:name',", "deleted data from the registry. 
Typically the same host referenced", "'ro': True, 'doc': 'The guid matching the function.'}), ('file', ('file:bytes',", "ATT&CK software.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse", "force version string [%s]', prop) def getModelDefs(self): modl = {", "def postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr) self.setNormFunc(int, self._normPyInt) def _normPyStr(self, valu):", "time the named pipe was created.', }), ('name', ('it:dev:pipe', {}),", ":path).', }), ('path:base', ('file:base', {}), { 'ro': True, 'doc': 'The", "an alert on a specific antivirus signature.', }), ('it:av:prochit', ('guid',", "the library was loaded.', }), ('va', ('int', {}), { 'doc':", "string.'}), ('target_hw', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "created the mutex. May or may not be the same", "process that wrote to the registry. Typically the same host", "{}), { 'doc': 'The process ID.', }), ('time', ('time', {}),", "with write permissions.', }), ('perms:execute', ('bool', {}), { 'doc': 'True", "{}), { 'doc': 'The time that the AV engine detected", "a file from a filesystem.', }), ('it:exec:file:write', ('guid', {}), {", "('guid', {}), { 'doc': 'A memory mapped segment located in", "'onespace': True}), { 'doc': 'The name of the group.', }),", "self.setNormFunc(str, self._normPyStr) self.setNormFunc(int, self._normPyInt) def _normPyStr(self, valu): valu = valu.strip()", "this ATT&CK mitigation.', 'ex': 'cno.mitre.m0100', }), ('references', ('array', {'type': 'inet:url',", "version number.', }), ('semver:major', ('int', {}), { 'doc': 'Version major", "'The time the data from the registry was deleted.', }),", "executing code that created the mutex.', }), ('host', ('it:host', {}),", "app.'}), )), ('it:os:android:ibroadcast', {}, ( ('app', ('it:prod:softver', {}), {'ro': True,", "the ATT&CK mitigation.', }), ('desc', ('str', {'strip': True}), { 'doc':", "}), ('sig', ('it:av:sig', {}), { 'doc': 'The signature that the", 
"Typically the same host referenced in :proc, if present.', }),", "is a library.'}), )), ('it:adid', {}, ()), ('it:os:ios:idfa', {}, ()),", "where the library was loaded in the process.', }), ('loaded',", "'doc': 'Version minor number.', }), ('semver:patch', ('int', {}), { 'doc':", "is binary data.', }), )), ('it:prod:soft', {}, ( ('name', ('str',", "designated by a Common Vulnerabilities and Exposures (CVE) number.', 'ex':", "{}), { 'doc': 'The current version of the rule.'}), )),", "'it:mitre:attack:software', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc': 'An", "('it:host', {}), {'ro': True, 'doc': 'Host with the software.'}), ('softver',", "('it:exec:proc', {}, ( ('host', ('it:host', {}), { 'doc': 'The host", "{}), { 'doc': 'The last known location for the host.'", "that operates the given host.', }), )), ('it:log:event', {}, (", "{ 'doc': 'The NTLM password hash value.', }), ('passwd', ('inet:passwd',", "originated.', }), )), ('it:hosturl', {}, ( ('host', ('it:host', {}), {", "'Version minor number.', }), ('semver:patch', ('int', {}), { 'doc': 'Version", "oldv): # Check to see if name is available and", "'it:prod:softver'), ('file', 'file:bytes'))}), { 'doc': 'A file is distributed by", "nodes included in this ATT&CK software.', 'ex': 'cno.mitre.s0100', }), ('references',", "= node.get('software') if prop: opts = {'vars': {'soft': prop}} nodes", "the mutex. May or may not be the same :exe", "string. 
Args: valu (str): String to attempt to get version", "{'fields': (('function', 'it:reveng:function'), ('string', 'str'))}), { 'deprecated': True, 'doc': 'A", "memory was mapped.', }), ('va', ('int', {}), { 'doc': 'The", "and set it if possible prop = node.get('software') if prop:", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"version\"", "'The file path if the mmap is a mapped view", "}), )), ('it:dev:str', {}, ( ('norm', ('str', {'lower': True}), {", "'Observed/variant names for this software.', }), ('desc', ('str', {}), {", "function.'}), ('file', ('file:bytes', {}), { 'ro': True, 'doc': 'The file", "signature.' }), ('it:auth:passwdhash', ('guid', {}), { 'doc': 'An instance of", "reading a file from a filesystem.', }), ('it:exec:file:write', ('guid', {}),", "{}, { 'doc': 'A NIST CPE 2.3 Formatted String', }),", "('time', {}), { 'doc': 'The time the logon session ended.',", "field from the CPE 2.3 string.'}), ('version', ('str', {'lower': True,", "('dst:ipv4', ('inet:ipv4', {}), { 'doc': 'The destination IPv4 address of", "('str', {}), { 'doc': 'A description of the software.', 'disp':", "'doc': 'The \"vendor\" field from the CPE 2.3 string.'}), ('product',", "of the first codeblock of the function.'}), ('rank', ('int', {}),", "s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr) self.setNormFunc(int, self._normPyInt) def _normPyStr(self, valu): valu =", "AV engine detected the signature.' }), )), ('it:auth:passwdhash', {}, (", "'doc': 'The host thread which caused the activity.'}), ('host', ('it:host',", "by a specific software version.'}), ('it:prod:softlib', ('comp', {'fields': ( ('soft',", "language : sw_edition : target_sw : target_hw : other *", "hash value.', }), ('hash:lm', ('hash:lm', {}), { 'doc': 'The LM", "that read the file. May or may not be the", "'The android intent which is broadcast by the app.'}), )),", "a semantic version string into its component parts. This normalizes", "the new file. 
May or may not be the same", "('comp', {'fields': (('host', 'it:host'), ('url', 'inet:url'))}), { 'doc': 'A url", "{ 'doc': 'The library file that was loaded.', }), )),", "('name', ('it:hostname', {}), { 'doc': 'The name of the host", "raise s_exc.BadTypeValu(valu=valu, mesg=mesg) subs = { 'part': parts[2], 'vendor': parts[3],", "represents an individual logon/logoff event.' }), ('it:hosturl', ('comp', {'fields': (('host',", "'doc': 'Version major number.', }), ('semver:minor', ('int', {}), { 'doc':", "('str', {'strip': True}), { 'doc': 'A unique command-line string.', 'ex':", "('time', {}), { 'doc': 'The time of the network flow", "('it:prod:soft', ('guid', {}), { 'doc': 'A arbitrary, unversioned software product.',", "group to a synapse ou:org.', }), ('name', ('ou:name', {}), {", "'doc': 'The specific file containing code that deleted the file.", "'An array of alternate names for the ATT&CK group.', }),", "on.'}), )), ('it:os:android:ilisten', {}, ( ('app', ('it:prod:softver', {}), {'ro': True,", "domain.', }), ('org', ('ou:org', {}), { 'doc': 'The org that", "bound (listening) TCP port.' 
}), )), ('it:fs:file', {}, ( ('host',", "version is known to be compatible with the given os", "( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent') )}), { 'doc': 'The given", "}), ('it:av:sig', ('comp', {'fields': (('soft', 'it:prod:soft'), ('name', ('str', {'lower': True})))}),", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"part\" field", "asyncio.CancelledError: # pragma: no cover raise except Exception: logger.exception('Failed to", "{}), { 'doc': 'The SHA1 password hash value.', }), ('hash:sha256',", "{}, ( ('host', ('it:host', {}), { 'doc': 'The host that", "android permission requested by the app.'}), )), ('it:prod:softos', {}, (", "process to the rule.'}), ('version', ('it:semver', {}), { 'doc': 'The", "{}, ( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The software", "which caused the activity.'}), ('host', ('it:host', {}), { 'doc': 'The", "'doc': 'Semver build string.', }), ('url', ('inet:url', {}), { 'doc':", "('inet:ipv4', {}), { 'doc': 'The source IPv4 address of the", "','}), { 'doc': 'An array of ATT&CK software IDs used", "patch) subs = {'major': major, 'minor': minor, 'patch': patch} return", "try: valu, info = self.core.model.type('it:semver').norm(valu) subs = info.get('subs') return valu,", "{ 'doc': 'A logical boundary of authentication and configuration such", "in :proc, if present.', }), ('exe', ('file:bytes', {}), { 'doc':", "('it:exec:proc', {}), { 'doc': 'The process which contains the thread.',", "return valu, {'subs': subs} def _normPyInt(self, valu): if valu <", "info = s_types.Str._normPyStr(self, valu) parts = self._splitCpe23(text) if len(parts) !=", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"product\"", "executable for DLLs loaded by that program.', }), ('cmd', ('it:cmd',", "the group.', }), ('software', ('array', {'type': 'it:mitre:attack:software', 'uniq': True, 'sorted':", "password hash value.', }), ('hash:sha1', ('hash:sha1', {}), { 'doc': 'The", "Windows Security Identifier of the 
account.', }), ('groups', ('array', {'type':", "'cno.mitre.s0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc':", "occurred.'}), ('time', ('time', {}), { 'doc': 'The time that the", "android intent which is broadcast by the app.'}), )), ('it:prod:softver',", "that matched the YARA rule.'}), ('version', ('it:semver', {}), { 'doc':", "'The user name of the process owner.', }), ('path', ('file:path',", "was written to/modified.', }), ('path:dir', ('file:path', {}), { 'ro': True,", "{ 'doc': 'The name of a host or system.', }),", "(('soft', 'it:prod:soft'), ('name', ('str', {'lower': True})))}), { 'doc': 'A signature", "version.'}), )), ('it:prod:softfile', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True,", "('semver:major', ('int', {}), { 'doc': 'Version major number.', }), ('semver:minor',", "{ 'doc': 'An advertising identification string.'}), ('it:os:windows:sid', ('str', {'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}),", "used for YARA evaluation engines.'}), )), ('it:app:yara:match', {}, ( ('rule',", "('guid', {}), { 'doc': 'A logical boundary of authentication and", "{}), {'ro': True, 'doc': 'The library software version.'}), )), ('it:prod:softfile',", "{}), {'ro': True, 'doc': 'The software version that contains the", "{}), { 'doc': 'Used to map an ATT&CK group to", "the registry was deleted.', }), ('reg', ('it:dev:regval', {}), { 'doc':", "('it:exec:reg:del', ('guid', {}), { 'doc': 'An instance of a host", "True, 'doc': 'The \"sw_edition\" field from the CPE 2.3 string.'}),", "hosted on or served by a host or system.', }),", ")), ('it:reveng:impfunc', {}, ()), ), } name = 'it' return", ")), ('it:sec:cwe', {}, ( ('name', ('str', {}), { 'doc': 'The", "1152921504606846975 as a semver.') major, minor, patch = s_version.unpackVersion(valu) valu", "Buffer Overflow)', }), ('desc', ('str', {}), { 'doc': 'The CWE", "minor number.', }), ('semver:patch', ('int', {}), { 'doc': 'Version patch", "( ('rule', 
('it:app:yara:rule', {}), { 'ro': True, 'doc': 'The YARA", "YARA rule match to a file.', }), ('it:app:yara:procmatch', ('guid', {}),", "True, 'strip': True}), { 'doc': 'Name of the software.', }),", "('it:dev:mutex', {}, ()), ('it:dev:regkey', {}, ()), ('it:dev:regval', {}, ( ('key',", "'Set to True if the software is a library.'}), )),", "('severity', ('int', {'enums': loglevels}), { 'doc': 'A log level integer", "included in this ATT&CK group ID.', 'ex': 'cno.mitre.g0100', }), ('references',", "}), ('names', ('array', {'type': 'str', 'uniq': True, 'sorted': True}), {", "True, 'doc': 'A reference to a string inside a function.',", "GUID that represents a host or system.' }), ('it:log:event', ('guid',", "('it:host', {}), { 'doc': 'The host on which the activity", "memory map. Bytes may optionally be present in the axon.',", "parsed out and available as strings if that information is", "a process.', }), ('it:exec:loadlib', ('guid', {}), { 'doc': 'A library", "into its component parts. This normalizes a version string into", "'doc': 'The operating system of the host.' }), ('manu', ('str',", "{ 'doc': 'The main process executing code that read the", "code that deleted data from the registry.', }), ('host', ('it:host',", "the client during the URL retrieval..' }), ('client:port', ('inet:port', {}),", "domain where the account is registered.', }), ('posix:uid', ('int', {}),", "import synapse.lib.types as s_types import synapse.lib.module as s_module import synapse.lib.version", "product model of the host.', }), ('serial', ('str', {}), {", "}), ('path:dir', ('file:path', {}), { 'ro': True, 'doc': 'The parent", "from version str await node.snap.addNode('it:dev:str', prop) # form the semver", "process. May be an actual or a virtual / notional", "retrieval..' }), ('client:port', ('inet:port', {}), { 'doc': 'The client port", "{}), { 'doc': 'The SHA512 password hash value.', }), ('hash:lm',", "read the file. 
Typically the same host referenced in :proc,", "a version string into an integer to allow version ordering.", "{ 'doc': 'The given software requests the android permission.'}), ('it:os:android:ilisten',", "}), ('user', ('inet:user', {}), { 'doc': 'The user name of", "a virtual / notional host.', }), ('exe', ('file:bytes', {}), {", "wrote to the file. Typically the same host referenced in", "'doc': 'A brief description of the network.', }), ('org', ('ou:org',", "parts try: valu, subs = self.bruteVersionStr(prop) await node.set('semver', valu) for", "'doc': 'The CWE description field.', 'disp': {'hint': 'text'}, }), ('url',", "subs except s_exc.BadTypeValu: # Try doing version part extraction by", "was loaded.', }), )), ('it:exec:mmap', {}, ( ('proc', ('it:exec:proc', {}),", "subs async def _onFormItDevStr(self, node): await node.set('norm', node.ndef[1]) async def", "{ 'doc': 'The specific file containing code that deleted data", "logon/logoff event.' }), ('it:hosturl', ('comp', {'fields': (('host', 'it:host'), ('url', 'inet:url'))}),", "('it:log:event', {}, ( ('mesg', ('str', {}), { 'doc': 'The log", "the logon session ended.', }), ('host', ('it:host', {}), { 'doc':", "'A description of the ATT&CK software.', 'disp': {'hint': 'text'}, }),", "the file.', }), ('group', ('inet:user', {}), { 'doc': 'The group", "('it:exec:thread', {}), { 'doc': 'The host thread which caused the", "{}), { 'doc': 'The user ID of the account.', 'ex':", "'The YARA rule that matched the file.'}), ('proc', ('it:exec:proc', {}),", "}), ('time', ('time', {}), { 'doc': 'The time the mutex", ")), ('it:av:prochit', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "'doc': 'The most recent version of the rule evaluated as", "guid matching the function.'}), ('string', ('str', {}), { 'ro': True,", "'doc': 'Associated names for the ATT&CK software.', }), ('desc', ('str',", ")), ('it:reveng:filefunc', {}, ( ('function', ('it:reveng:function', {}), { 'ro': True,", "'doc': 'The time the logon 
occured.', }), ('success', ('bool', {}),", "the file was deleted.', }), ('path', ('file:path', {}), { 'doc':", "(('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}), { 'doc': 'A YARA rule match", "process.', }), ('it:cmd', ('str', {'strip': True}), { 'doc': 'A unique", "member of this group.', }), ('posix:gid', ('int', {}), { 'doc':", "created in the process.', }), ('size', ('int', {}), { 'doc':", "that created the mutex. May or may not be the", "('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Tactic", "('comp', {'fields': (('file', 'file:bytes'), ('function', 'it:reveng:function'))}), { 'doc': 'An instance", "'ro': True, 'doc': 'The file that contains the function.'}), ('va',", "'doc': 'The \"sw_edition\" field from the CPE 2.3 string.'}), ('target_sw',", "{}), { 'doc': 'The URL that documents the ATT&CK mitigation.',", "('str', {}), { 'doc': 'A brief description of the domain.',", "the thread.', }), ('src:proc', ('it:exec:proc', {}), { 'doc': 'An external", "{}), { 'doc': 'The exit code or return value for", "('array', {'type': 'it:reveng:impfunc'}), { 'doc': 'Calls to imported library functions", "unique identifier.', }), ('it:app:yara:match', ('comp', {'fields': (('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}),", "prop) # form the semver properly or bruteforce parts try:", "URL. 
Typically the same host referenced in :proc, if present.',", "'doc': 'The path where the file was written to/modified.', }),", "on which the activity occurred.'}), ('time', ('time', {}), { 'doc':", "= self._splitCpe23(text) if len(parts) != 13: mesg = f'CPE 2.3", "}), ('it:adid', ('str', {'lower': True, 'strip': True}), { 'doc': 'An", "('array', {'type': 'ou:name', 'uniq': True, 'sorted': True}), { 'doc': 'An", "}), ('author:acct', ('inet:web:acct', {}), { 'deprecated': True, 'doc': 'Web account", "'The \"target_sw\" field from the CPE 2.3 string.'}), ('target_hw', ('str',", "{'lower': True, 'strip': True}), { 'doc': 'Name of the software.',", "the logon session.', }), ('client:host', ('it:host', {}), { 'doc': 'The", "('it:host', {}), { 'doc': 'The host that executed the process.", "('sig:name', ('str', {'lower': True}), { 'ro': True, 'doc': 'The signature", "the domain.', }), ('org', ('ou:org', {}), { 'doc': 'The org", "The comparable integer value representing the semver is the bitwise", "'doc': 'The destination IPv4 address of the flow that caused", "library load event in a process.', }), ('it:exec:mmap', ('guid', {}),", "from.', }), )), ('it:prod:softlib', {}, ( ('soft', ('it:prod:softver', {}), {'ro':", "IDs addressed by the mitigation.', }), )), ('it:dev:int', {}, ()),", "key or value that was read.', }), )), ('it:exec:reg:set', {},", "android permission.'}), ('it:os:android:ilisten', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent'))}),", "{}), { 'doc': 'The MD5 password hash value.', }), ('hash:sha1',", "the account.', }), ('groups', ('array', {'type': 'it:group'}), { 'doc': 'An", "the CPE 2.3 string.'}), ('other', ('str', {'lower': True, 'strip': True}),", "('host', ('it:host', {}), { 'doc': 'The host where the account", "Weaknesses Enumeration Specification', 'ex': 'CWE-120', }), ('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}),", "('it:reveng:filefunc', ('comp', {'fields': (('file', 
'file:bytes'), ('function', 'it:reveng:function'))}), { 'doc': 'An", "description.', }), ('parents', ('array', {'type': 'it:sec:cwe', 'uniq': True, 'sorted': True,", "the process that created the named pipe. Typically the same", "('array', {'type': 'str', 'uniq': True, 'sorted': True}), { 'doc': 'Associated", "{}), { 'doc': 'The path where the file was created.',", "('pid', ('int', {}), { 'doc': 'The process ID.', }), ('time',", "time the logon session ended.', }), ('host', ('it:host', {}), {", "subs = s_version.parseVersionParts(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr',", "which contains the signature.', }), )), ('it:av:prochit', {}, ( ('proc',", "'A YARA rule unique identifier.', }), ('it:app:yara:match', ('comp', {'fields': (('rule',", "and patch levels. Prerelease and build information will be parsed", "}), ('author:person', ('ps:person', {}), { 'deprecated': True, 'doc': 'Person who", "if the value is an integer.', }), ('bytes', ('file:bytes', {}),", "of the software.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq': True,", "'doc': 'The library file that was loaded.', }), )), ('it:exec:mmap',", "file.'}), ('file', ('file:bytes', {}), {'ro': True, 'doc': 'The file distributed", "('unloaded', ('time', {}), { 'doc': 'The time the library was", "was read.', }), )), ('it:exec:file:write', {}, ( ('proc', ('it:exec:proc', {}),", "('author:org', ('ou:org', {}), { 'deprecated': True, 'doc': 'Organization which authored", "that read the registry. 
May or may not be the", "name for the ATT&CK software.', }), ('names', ('array', {'type': 'str',", "address of the flow that caused the hit.'}), ('dst:ipv6', ('inet:ipv6',", "part += c except StopIteration: parts.append(part) return parts def _normPyStr(self,", "True}), { 'ro': True, 'doc': 'The \"target_sw\" field from the", "'it:host'), ('softver', 'it:prod:softver'))}), { 'doc': 'A version of a software", "caused the activity.'}), ('host', ('it:host', {}), { 'doc': 'The host", "'it:mitre:attack:technique', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc': 'An", "containing code that created the named pipe. May or may", "{ 'ro': True, 'doc': 'The signature name.' }), ('desc', ('str',", "the new file.', }), ('host', ('it:host', {}), { 'doc': 'The", "'The time the file was created.', }), ('path', ('file:path', {}),", "'doc': 'The time the file was written to/modified.', }), ('path',", "present on a given host.', }), ('it:av:sig', ('comp', {'fields': (('soft',", "('bool', {}), { 'doc': 'Set to True if the software", "{}), { 'doc': 'A named pipe created by a process", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"edition\" field", "of the flow that caused the hit.'}), ('dst:ipv6', ('inet:ipv6', {}),", "registered.', }), ('groups', ('array', {'type': 'it:group'}), { 'doc': 'Groups that", "string.'}), ('language', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "time the registry was read.', }), ('reg', ('it:dev:regval', {}), {", ")}), { 'doc': 'The given software broadcasts the given Android", "}), ('it:exec:mmap', ('guid', {}), { 'doc': 'A memory mapped segment", "'text'}, }), ('url', ('inet:url', {}), { 'doc': 'A URL linking", "names for this software version.', }), ('cpe', ('it:sec:cpe', {}), {", "registry key or value that was read.', }), )), ('it:exec:reg:set',", "tactics that include this technique.', }), )), ('it:mitre:attack:software', {}, (", "to brute force version string [%s]', prop) def getModelDefs(self): modl", "'doc': 'A GUID 
representing an individual log event.', 'interfaces': ('it:host:activity',),", "software version.'}), ('it:hostsoft', ('comp', {'fields': (('host', 'it:host'), ('softver', 'it:prod:softver'))}), {", "was created.', }), ('deleted', ('time', {}), { 'doc': 'The time", "{ 'doc': 'The specific file containing code that bound the", "a host or system.' }), ('it:log:event', ('guid', {}), { 'doc':", "{ 'doc': 'A Mitre ATT&CK Tactic ID.', 'ex': 'TA0040', }),", "}), ('it:exec:file:read', ('guid', {}), { 'doc': 'An instance of a", "or system.', }), ('desc', ('str', {}), { 'doc': 'A free-form", "host creating or setting a registry key.', }), ('it:exec:reg:del', ('guid',", "default installation path of the file.'}), )), ('it:hostsoft', {}, (", "ID of the account.', 'ex': '1001', }), ('posix:gecos', ('int', {}),", "'doc': 'The file representing the value of the registry key,", "('src', ('inet:addr', {}), { 'doc': 'The source address of flow", "}), ('it:group', ('guid', {}), { 'doc': 'A GUID that represents", "s_version.parseSemver(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable to", "}), )), ('it:sec:cve', {}, ( ('desc', ('str', {}), { 'doc':", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"target_hw\" field", "('comp', {'fields': (('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}), { 'doc': 'A YARA", "'URL available on the host.', }), )), ('it:dev:str', {}, (", "'A description of the software.', 'disp': {'hint': 'text'}, }), ('desc:short',", "from :path).', }), ('path:ext', ('str', {'lower': True, 'strip': True}), {", "}), ('org', ('ou:org', {}), { 'doc': 'The org that operates", "software version.', }), ('vers', ('it:dev:str', {}), { 'doc': 'Version string", "'doc': 'Notes concerning the function.'}), ('impcalls', ('array', {'type': 'it:reveng:impfunc'}), {", "inet:addr of the server when binding the port.' 
}), ('server:ipv4',", "{ 'doc': 'The place where the host resides.', }), ('loc',", "identifier.', }), ('it:app:snort:hit', ('guid', {}), { 'doc': 'An instance of", "system.', }), ('desc', ('str', {}), { 'doc': 'A free-form description", "('it:dev:mutex', ('str', {}), { 'doc': 'A string representing a mutex.',", "developer selected integer constant.', }), ('it:dev:regkey', ('str', {}), { 'doc':", "text left after stripping whitespace') subs = s_version.parseSemver(valu) if subs", "on.' }), ('time', ('time', {}), { 'doc': 'The time that", "True}), { 'doc': 'The name of the group.', }), ('desc',", "Microsoft Windows Security Identifier of the account.', }), ('groups', ('array',", "('inet:user', {}), { 'doc': 'The user name of the process", "('str', {}), { 'doc': 'Semver prerelease string.', }), ('semver:build', ('str',", "True, 'doc': 'The file that triggered the signature hit.', }),", "logon originated.', }), )), ('it:hosturl', {}, ( ('host', ('it:host', {}),", "{'ro': True, 'doc': 'Software on the host.'}) )), ('it:av:sig', {},", "}), ('strings', ('array', {'type': 'it:dev:str', 'uniq': True}), { 'doc': 'An", "= s_version.fmtVersion(major, minor, patch) return valu loglevels = ( (10,", "('target_sw', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "for information about the signature.', }) )), ('it:av:filehit', {}, (", "{ 'ro': True, 'doc': 'The YARA rule that matched the", "('user', ('inet:user', {}), { 'doc': 'The username associated with the", "matched the YARA rule.'}), ('time', ('time', {}), { 'doc': 'The", "in this ATT&CK technique.', 'ex': 'cno.mitre.t0100', }), ('references', ('array', {'type':", "'The command string used to launch the process, including any", "'doc': 'The process which created the process.' 
}), ('killedby', ('it:exec:proc',", "'doc': 'The rule enabled status to be used for YARA", "True}), { 'doc': 'A short description of the software.', }),", "URL that documents the ATT&CK software.', }), ('tag', ('syn:tag', {}),", "}), ('killedby', ('it:exec:proc', {}), { 'doc': 'The process which killed", "( ('text', ('str', {}), { 'doc': 'The YARA rule text.',", "('it:dev:int', {}, ()), ('it:dev:pipe', {}, ()), ('it:dev:mutex', {}, ()), ('it:dev:regkey',", "a string inside a function.', }), ('it:reveng:impfunc', ('str', {'lower': 1}),", "'doc': 'The path for the file.', }), ('path:dir', ('file:path', {}),", "rule.'}), ('src', ('inet:addr', {}), { 'doc': 'The source address of", "'doc': 'A description of the ATT&CK technique.', 'disp': {'hint': 'text'},", "('it:domain', ('guid', {}), { 'doc': 'A logical boundary of authentication", "Size of Input (Classic Buffer Overflow)', }), ('desc', ('str', {}),", "the library was loaded.', }), ('unloaded', ('time', {}), { 'doc':", "{'fields': (('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}), { 'doc': 'A YARA rule", "the software.', }), ('isos', ('bool', {}), { 'doc': 'Set to", "}), )), ('it:app:snort:rule', {}, ( ('text', ('str', {}), { 'doc':", "time.', }), ('atime', ('time', {}), { 'doc': 'The file access", "'ex': '/bin/bash', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The Microsoft", "('it:adid', {}, ()), ('it:os:ios:idfa', {}, ()), ('it:os:android:aaid', {}, ()), ('it:os:android:perm',", "instance of a function in an executable.', }), ('it:reveng:funcstr', ('comp',", "'doc': 'A Mitre ATT&CK Software ID.', 'ex': 'S0154', }), ('it:dev:str',", "Identifier of the account.', }), ('groups', ('array', {'type': 'it:group'}), {", "host.', }), ('exe', ('file:bytes', {}), { 'doc': 'The file considered", "of the group.', }), ('desc', ('str', {}), { 'doc': 'A", "'doc': 'The file creation time.', }), ('mtime', ('time', {}), {", "that wrote to / modified the existing file.', }), ('host',", "'doc': 'The host running 
the process that bound the listening", "{}), { 'doc': 'The file that was read.', }), )),", "loglevels = ( (10, 'debug'), (20, 'info'), (30, 'notice'), (40,", "'doc': 'The username associated with the account', }), ('contact', ('ps:contact',", "}), ('name', ('str', {'lower': True}), { 'ro': True, 'doc': 'The", "{ 'doc': 'The IPv4 address specified to bind().' }), ('server:ipv6',", "prop) def getModelDefs(self): modl = { 'ctors': ( ('it:semver', 'synapse.models.infotech.SemVer',", "string into an integer to allow version ordering. Prerelease information", "Provides support for parsing a semantic version string into its", "('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), { 'doc': 'NIST NVD Common Weaknesses", "IDs used by the group.', }), )), ('it:mitre:attack:tactic', {}, (", "registry. Typically the same host referenced in :proc, if present.',", "CPE 2.3 string.'}), ('update', ('str', {'lower': True, 'strip': True}), {", "s_types.Str.__init__(self, modl, name, info, opts) def _splitCpe23(self, text): part =", "the signature hit.', }), ('sig', ('it:av:sig', {}), { 'ro': True,", "('inet:passwd', {}), { 'doc': 'The (optional) clear text password for", "thread.', }), ('created', ('time', {}), { 'doc': 'The time the", "calculate the password hash.', }), ('hash:md5', ('hash:md5', {}), { 'doc':", "{ 'doc': 'The file creation time.', }), ('mtime', ('time', {}),", "{}, ( ('name', ('it:hostname', {}), { 'doc': 'The name of", "created.', }), ('name', ('it:dev:mutex', {}), { 'doc': 'The mutex string.',", "{}), { 'doc': 'The IPv4 of the client during the", "('it:reveng:filefunc', {}, ( ('function', ('it:reveng:function', {}), { 'ro': True, 'doc':", "false to indicate an unsuccessful logon attempt.', }), ('logoff:time', ('time',", "{ 'doc': 'The host where the account is registered.', }),", "}), )), ('it:sec:cpe', {}, ( ('part', ('str', {'lower': True, 'strip':", "), 'types': ( ('it:hostname', ('str', {'strip': True, 'lower': True}), {", "function.', }), )), 
('it:reveng:filefunc', {}, ( ('function', ('it:reveng:function', {}), {", "as a match.'}), )), ('it:reveng:function', {}, ( ('name', ('str', {}),", "with this version instance.', }), ('software:name', ('str', {'lower': True, 'strip':", "update : edition : language : sw_edition : target_sw :", "the snort rule.'}), ('version', ('it:semver', {}), { 'doc': 'The current", "may optionally be present in the axon.', }), )), ('it:exec:mutex',", "file that matched the YARA rule.'}), ('version', ('it:semver', {}), {", "{ 'part': parts[2], 'vendor': parts[3], 'product': parts[4], 'version': parts[5], 'update':", "True}), { 'ro': True, 'doc': 'The signature name.', }), ('sig:soft',", "{ 'doc': 'The path where the file was deleted.', }),", "}), )), ('it:logon', {}, ( ('time', ('time', {}), { 'doc':", "('author:acct', ('inet:web:acct', {}), { 'deprecated': True, 'doc': 'Web account of", "server when binding the port.' }), ('server:ipv4', ('inet:ipv4', {}), {", "are a member of this group.', }), ('posix:gid', ('int', {}),", "endpoint) or virtual (e.g., malware sandbox) host.', }), ('it:exec:thread', ('guid',", "= s_version.unpackVersion(valu) valu = s_version.fmtVersion(major, minor, patch) return valu loglevels", "name='bruteVersionStr', mesg='Unable to brute force version parts out of the", "'doc': 'The snort rule that matched the file.'}), ('flow', ('inet:flow',", "( ('org', ('ou:org', {}), { 'doc': 'Used to map an", "{}), { 'deprecated': True, 'doc': 'Person who authored the software.',", "the value is binary data.', }), )), ('it:prod:soft', {}, (", "}), ('it:reveng:impfunc', ('str', {'lower': 1}), { 'doc': 'A function from", "('inet:port', {}), { 'doc': 'The client port during the URL", "process executing code that read the file.', }), ('host', ('it:host',", "present.', }), ('time', ('time', {}), { 'doc': 'The time the", "}), )), ('it:account', {}, ( ('user', ('inet:user', {}), { 'doc':", "{ 'doc': 'A GUID that represents a host or system.'", "the software is available 
from.', }), )), ('it:prod:softlib', {}, (", "'doc': 'The account that logged in.', }), ('creds', ('auth:creds', {}),", "'it:sec:cwe', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc': 'An", ":path).', }), ('path:ext', ('str', {'lower': True, 'strip': True}), { 'ro':", "password hash value.', }), ('hash:lm', ('hash:lm', {}), { 'doc': 'The", "}), ('client', ('inet:client', {}), { 'doc': 'The address of the", "the file triggered on.' }), ('sig:name', ('str', {'lower': True}), {", "'An external process which created the thread.', }), ('src:thread', ('it:exec:thread',", "time the mutex was created.', }), ('name', ('it:dev:mutex', {}), {", "{}), { 'ro': True, 'doc': 'The final component of the", "matching the function.'}), ('file', ('file:bytes', {}), { 'ro': True, 'doc':", "'doc': 'An array of URLs that document the ATT&CK mitigation.',", "base memory address where the map was created in the", "# pragma: no cover raise except Exception: logger.exception('Failed to brute", "}), ('it:log:event', ('guid', {}), { 'doc': 'A GUID representing an", "'A list of CVEs that apply to this software version.',", "True, 'doc': 'URL available on the host.', }), )), ('it:dev:str',", "'The file modification time.', }), ('atime', ('time', {}), { 'doc':", "product.', }), ('it:adid', ('str', {'lower': True, 'strip': True}), { 'doc':", "'doc': 'The value of the registry key, if the value", "the ATT&CK technique.', }), ('parent', ('it:mitre:attack:technique', {}), { 'doc': 'The", "YARA rule match to a process.', }), ('it:app:snort:rule', ('guid', {}),", "loaded.', }), )), ('it:exec:mmap', {}, ( ('proc', ('it:exec:proc', {}), {", "{}), { 'doc': 'The time the thread was created.', }),", "the registry was written to.', }), ('reg', ('it:dev:regval', {}), {", "rule evaluated as a match.'}), )), ('it:reveng:function', {}, ( ('name',", "= iter(text) try: while True: c = next(genr) if c", "of the rule evaluated as a match.'}), )), ('it:reveng:function', {},", "code for the process.', }), 
('user', ('inet:user', {}), { 'doc':", "the hit.'}), ('time', ('time', {}), { 'doc': 'The time of", "{ 'doc': 'A process executing on a host. May be", "description.', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc':", "exited.', }), ('exitcode', ('int', {}), { 'doc': 'The exit code", "specific software version.'}), ('it:prod:softlib', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('lib',", "this network.', }), ('net6', ('inet:net6', {}), { 'doc': 'The optional", "{}), { 'doc': 'The specific file containing code that requested", "pragma: no cover raise except Exception: logger.exception('Failed to brute force", "('time', {}), { 'doc': 'The time that the YARA engine", "(listening) TCP port.' }), )), ('it:fs:file', {}, ( ('host', ('it:host',", "annotate nodes included in this ATT&CK group ID.', 'ex': 'cno.mitre.g0100',", "the file was deleted.', }), ('path:dir', ('file:path', {}), { 'ro':", "), 'interfaces': ( ('it:host:activity', { 'props': ( ('exe', ('file:bytes', {}),", "CPE 2.3 string.'}), )), ('it:sec:cwe', {}, ( ('name', ('str', {}),", "'The given software requests the android permission.'}), ('it:os:android:ilisten', ('comp', {'fields':", "* = \"any\" - = N/A ''' def __init__(self, modl,", "'The LM password hash value.', }), ('hash:ntlm', ('hash:ntlm', {}), {", "'doc': 'The executable file which caused the activity.'}), ('proc', ('it:exec:proc',", ")), ('it:app:snort:rule', {}, ( ('text', ('str', {}), { 'doc': 'The", "IPv6 address of the flow that caused the hit.'}), ('src:port',", "'A unique command-line string.', 'ex': 'foo.exe --dostuff bar', }), ('it:exec:mutex',", "s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable to brute force version parts out of", "ATT&CK technique.', 'ex': 'cno.mitre.t0100', }), ('references', ('array', {'type': 'inet:url', 'uniq':", "of the flow that caused the hit.'}), ('dst', ('inet:addr', {}),", "to false to indicate an unsuccessful logon attempt.', }), ('logoff:time',", "from a 
filesystem.', }), ('it:exec:file:write', ('guid', {}), { 'doc': 'An", "other * = \"any\" - = N/A ''' def __init__(self,", "('it:prod:softfile', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('file', 'file:bytes'))}), { 'doc':", "('str', {'lower': True}), { 'doc': 'A short description of the", "'Email address of the sofware author.', }), ('author:person', ('ps:person', {}),", "the process.' }), ('killedby', ('it:exec:proc', {}), { 'doc': 'The process", "rundll32.exe may be considered the \"main\" executable for DLLs loaded", "'The main process executing code that read the file.', }),", "'crit'), (70, 'alert'), (80, 'emerg'), ) class ItModule(s_module.CoreModule): async def", "a software product which is present on a given host.',", "'A raw JSON record of the log event.', }), )),", "'emerg'), ) class ItModule(s_module.CoreModule): async def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr)", "}), ('client:host', ('it:host', {}), { 'doc': 'The host where the", "{}), { 'doc': 'The specific file containing code that created", "to launch the process, including any command line parameters.', 'disp':", "with this version instance.', }), ('vers:norm', ('str', {'lower': True}), {", "of a function in an executable.', }), ('it:reveng:funcstr', ('comp', {'fields':", "('arch', ('it:dev:str', {}), { 'doc': 'Software architecture.', }), ('released', ('time',", "the \"main\" executable for DLLs loaded by that program.', }),", "{}), { 'ro': True, 'doc': 'The file that contains the", "operating system.'}), ('os', ('it:prod:softver', {}), {'ro': True, 'doc': 'The operating", "of the software was released.', }), ('semver', ('it:semver', {}), {", "{'ro': True, 'doc': 'The app software which listens for the", "authentication domain where the account is registered.', }), ('posix:uid', ('int',", "('str', {}), { 'doc': 'A free-form 
description of the host.',", "('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The software which can", "}), ('it:sec:cve', ('str', {'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc': 'A", "read.', }), )), ('it:exec:reg:set', {}, ( ('proc', ('it:exec:proc', {}), {", "valu loglevels = ( (10, 'debug'), (20, 'info'), (30, 'notice'),", "program.', }), ('cmd', ('it:cmd', {}), { 'doc': 'The command string", "'doc': 'The SHA1 password hash value.', }), ('hash:sha256', ('hash:sha256', {}),", "deleted the file. May or may not be the same", "'The SHA512 password hash value.', }), ('hash:lm', ('hash:lm', {}), {", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"sw_edition\" field", "ATT&CK tactic.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse", "string used to launch the process, including any command line", "path to the executable of the process.', }), ('src:exe', ('file:path',", "{}), { 'doc': 'A GUID representing an individual log event.',", "('hash:sha512', ('hash:sha512', {}), { 'doc': 'The SHA512 password hash value.',", "process executing code that deleted data from the registry.', }),", "'strip': True}), { 'ro': True, 'doc': 'The file extension of", "that created the named pipe.', }), ('host', ('it:host', {}), {", "vendor : product : version : update : edition :", "('it:os:android:ilisten', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent'))}), { 'doc':", "('perms:write', ('bool', {}), { 'doc': 'True if the mmap is", "of the software.', }), ('cpe', ('it:sec:cpe', {}), { 'doc': 'The", "for the node.', }), ('os', ('it:prod:softver', {}), { 'doc': 'The", "'The main process executing code that wrote to the registry.',", "running the process that wrote to the registry. 
Typically the", "}), ('released', ('time', {}), { 'doc': 'Timestamp for when this", "'doc': 'The name of the snort rule.'}), ('version', ('it:semver', {}),", "('path', ('file:path', {}), { 'doc': 'The path that the library", "of flow that caused the hit.'}), ('src:ipv4', ('inet:ipv4', {}), {", "version ordering. Prerelease information is disregarded for integer comparison purposes,", "'doc': 'The IPv6 address specified to bind().' }), ('server:port', ('inet:port',", "a snort rule hit.', }), ('it:reveng:function', ('guid', {}), { 'doc':", "field.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", "was bound.', }), ('server', ('inet:server', {}), { 'doc': 'The inet:addr", "{ 'doc': 'A string representing a mutex.', }), ('it:dev:int', ('int',", "May be an actual (e.g., endpoint) or virtual (e.g., malware", "'Used to map an ATT&CK group to a synapse ou:org.',", "{ 'doc': 'The host running the process that wrote to", "('proc', ('it:exec:proc', {}), { 'doc': 'The process where the library", "('guid', {}), { 'doc': 'A GUID that represents a group", "('softver', ('it:prod:softver', {}), {'ro': True, 'doc': 'Software on the host.'})", "information associated with this account.', }), ('host', ('it:host', {}), {", "name within the namespace of an antivirus engine name.' }),", "{}), { 'doc': 'A function inside an executable.', }), ('it:reveng:filefunc',", "the domain.', }), ('desc', ('str', {}), { 'doc': 'A brief", "ATT&CK Mitigation ID.', 'ex': 'M1036', }), ('it:mitre:attack:software', ('str', {'regex': r'^S[0-9]{4}$'}),", "'An instance of a host requesting a URL.', }), ('it:exec:bind',", "specific file containing code that deleted the file. May or", "for YARA evaluation engines.'}), )), ('it:app:yara:match', {}, ( ('rule', ('it:app:yara:rule',", "that the file triggered on.' 
}), ('time', ('time', {}), {", "behavior.'}), ('complexity', ('int', {}), { 'doc': 'The complexity of the", "a member of.', }), )), ('it:group', {}, ( ('name', ('str',", "'The URL that documents the ATT&CK group.', }), ('tag', ('syn:tag',", "that information is present. ''' def postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr)", "{ 'doc': 'A unique command-line string.', 'ex': 'foo.exe --dostuff bar',", "'language': parts[8], 'sw_edition': parts[9], 'target_sw': parts[10], 'target_hw': parts[11], 'other': parts[12],", "indicate an unsuccessful logon attempt.', }), ('logoff:time', ('time', {}), {", "loglevels}), { 'doc': 'A log level integer that increases with", "{}), { 'doc': 'The time the library was unloaded.', }),", "'T1548', }), ('it:mitre:attack:mitigation', ('str', {'regex': r'^M[0-9]{4}$'}), { 'doc': 'A Mitre", "of the ATT&CK mitigation.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "s_version.parseVersionParts(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable to", "of the software is available from.', }), )), ('it:prod:softlib', {},", "for k, v in subs.items(): await node.set(f'semver:{k}', v) except asyncio.CancelledError:", "that wrote to the file. May or may not be", "the function.'}), ('funccalls', ('array', {'type': 'it:reveng:filefunc'}), { 'doc': 'Other function", "the CPE 2.3 string.'}), ('vendor', ('ou:name', {}), { 'ro': True,", "this CWE to a full description.', }), ('parents', ('array', {'type':", "{'type': 'it:mitre:attack:software', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc':", "True, 'onespace': True}), { 'doc': 'The name of the group.',", "','}), { 'doc': 'An array of ChildOf CWE Relationships.' 
}),", "included in this ATT&CK tactic.', 'ex': 'cno.mitre.ta0100', }), ('references', ('array',", "to a process.', }), ('it:app:snort:rule', ('guid', {}), { 'doc': 'A", "True, 'doc': 'The \"vendor\" field from the CPE 2.3 string.'}),", "function calls within the scope of the function.', }), )),", "'doc': 'The name of the group.', }), ('desc', ('str', {}),", "'doc': 'The duration of the logon session.', }), ('client:host', ('it:host',", "executing code that bound the listening port.', }), ('host', ('it:host',", "{ 'doc': 'The main process executing code that created the", "advertising identification string.'}), ('it:os:android:aaid', ('it:adid', {}), { 'doc': 'An android", "('loaded', ('time', {}), { 'doc': 'The time the library was", "'The path that the library was loaded from.', }), ('file',", "which listens for the android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro':", "self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self, valu): ''' Brute force the version", "with the account', }), ('contact', ('ps:contact', {}), { 'doc': 'Additional", "'The host where the group is registered.', }), ('domain', ('it:domain',", "True}), { 'doc': 'The name of the network.', }), ('desc',", "}), ('time', ('time', {}), { 'doc': 'The time the port", "'doc': 'The client port during the URL retrieval..' 
}), )),", "{ 'doc': 'The specific file containing code that wrote to", "IPv4 where the logon originated.', }), ('client:ipv6', ('inet:ipv6', {}), {", "host deleting a file from a filesystem.', }), ('it:exec:file:read', ('guid',", "ATT&CK technique IDs used by the group.', }), ('software', ('array',", "}), ('success', ('bool', {}), { 'doc': 'Set to false to", "('desc', ('str', {}), { 'doc': 'A description of the software.',", "= \"any\" - = N/A ''' def __init__(self, modl, name,", "try: valu, subs = self.bruteVersionStr(prop) await node.set('semver', valu) for k,", "to the POSIX account's default shell.\", 'ex': '/bin/bash', }), ('windows:sid',", "}), ), 'interfaces': ( ('it:host:activity', { 'props': ( ('exe', ('file:bytes',", "'doc': 'A Mitre ATT&CK Mitigation ID.', 'ex': 'M1036', }), ('it:mitre:attack:software',", "array of ATT&CK technique IDs addressed by the mitigation.', }),", "('it:exec:reg:get', ('guid', {}), { 'doc': 'An instance of a host", "{}), { 'doc': 'The time the memory map was created.',", "'A process executing on a host. May be an actual", "('it:os:windows:sid', {}), { 'doc': 'The Microsoft Windows Security Identifier of", "('array', {'type': 'it:mitre:attack:tactic', 'uniq': True, 'sorted': True, 'split': ','}), {", "the process that wrote to the registry. Typically the same", "{ 'doc': 'A snort rule unique identifier.', }), ('it:app:snort:hit', ('guid',", "valu.startswith('cpe:2.3:'): mesg = 'CPE 2.3 string is expected to start", "'doc': 'The primary group ID of the account.', 'ex': '1001',", "ATT&CK technique.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse", "'The address of the client during the URL retrieval.' 
}),", "{}), { 'doc': 'The base memory address where the library", "contain the current value for the technique.', }), ('desc', ('str',", "'doc': 'The specific file containing code that read the file.", "full description.', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), {", "'it:prod:soft'), ('intent', 'it:os:android:intent') )}), { 'doc': 'The given software broadcasts", "('inet:user', {}), { 'doc': 'The group owner of the file.',", "'doc': 'An array of URLs that document the ATT&CK technique.',", "software.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted': True}),", "filesystem.', }), ('it:exec:reg:get', ('guid', {}), { 'doc': 'An instance of", "def _onFormMakeDevStr(self, node): pprop = node.ndef[1] await node.snap.addNode('it:dev:str', pprop) async", "account logged in to.', }), ('account', ('it:account', {}), { 'doc':", "{}), { 'doc': 'The main process executing code that deleted", "('str', {'lower': True}), { 'doc': 'Normalized version of the version", "{ 'doc': 'The synapse tag used to annotate nodes included", "of the domain.', }), ('org', ('ou:org', {}), { 'doc': 'The", "'doc': 'The host where the account is registered.', }), ('domain',", "{ 'doc': 'The time the data from the registry was", "'The primary name for the ATT&CK mitigation.', }), ('desc', ('str',", "by a process at runtime.', }), ('it:exec:pipe', ('guid', {}), {", "'ex': 'cve-2012-0158' }), ('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), { 'doc': 'NIST", "import asyncio import logging import synapse.exc as s_exc import synapse.lib.types", "'The path where the file was created.', }), ('path:dir', ('file:path',", "intent which is listened for by the app.'}), )), ('it:os:android:ibroadcast',", "given host.', }), )), ('it:log:event', {}, ( ('mesg', ('str', {}),", "}), ('it:exec:pipe', ('guid', {}), { 'doc': 'A named pipe created", "'A reference to a string inside a function.', }), ('it:reveng:impfunc',", "}), ('hash:sha256', ('hash:sha256', {}), { 
'doc': 'The SHA256 password hash", "'doc': 'The YARA rule that matched the file.'}), ('file', ('file:bytes',", "considered the \"main\" executable for the process. For example, rundll32.exe", "{}), { 'doc': 'The sensor host node that produced the", "version.'}), ('it:prod:softlib', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('lib', 'it:prod:softver'))}), {", "subs is None: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable to parse string", "'doc': 'A reference URL for information about the signature.', })", "('str', {}), { 'doc': 'A description of the ATT&CK group.',", "ATT&CK group to a synapse ou:org.', }), ('name', ('ou:name', {}),", "'it:group'}), { 'doc': 'An array of groups that the account", "'doc': 'Email address of the sofware author.', }), ('author:person', ('ps:person',", "return ':'.join(parts), {'subs': subs} class SemVer(s_types.Int): ''' Provides support for", "'The current version of the rule.'}), ('enabled', ('bool', {}), {", "ATT&CK software IDs used by the group.', }), )), ('it:mitre:attack:tactic',", "named pipe. 
May or may not be the same :exe", "'doc': 'The authentication domain that the host is a member", "node.snap.addNode('it:dev:str', pprop) async def _onPropSoftverSoft(self, node, oldv): # Check to", "True, 'doc': 'The software which can run on the operating", "'doc': 'Calls to imported library functions within the scope of", "the mmap is mapped with execute permissions.', }), ('created', ('time',", "('exe', ('file:bytes', {}), { 'doc': 'The executable file which caused", "'sorted': True, 'split': ','}), { 'doc': 'An array of ATT&CK", "\"vendor\" field from the CPE 2.3 string.'}), ('product', ('str', {'lower':", "'doc': 'The guid matching the function.'}), ('file', ('file:bytes', {}), {", "registry was read.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The", "which caused the activity.'}), ('thread', ('it:exec:thread', {}), { 'doc': 'The", "an individual log event.', 'interfaces': ('it:host:activity',), }), ('it:network', ('guid', {}),", "URL was requested.', }), ('url', ('inet:url', {}), { 'doc': 'The", "{ 'ro': True, 'doc': 'The file that matched the YARA", "tactic.', }), )), ('it:mitre:attack:technique', {}, ( ('name', ('str', {'strip': True}),", "software product.', }), ('it:adid', ('str', {'lower': True, 'strip': True}), {", "{}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main process", "('guid', {}), { 'doc': 'A arbitrary, unversioned software product.', }),", "integer comparison purposes, as we cannot map an arbitrary pre-release", "node.ndef[1]) async def _onFormMakeDevStr(self, node): pprop = node.ndef[1] await node.snap.addNode('it:dev:str',", "{}), { 'doc': 'A GUID that represents a host or", "broadcasts the android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro': True, 'doc':", "True, 'strip': True}), { 'doc': 'An advertising identification string.'}), ('it:os:windows:sid',", "c except StopIteration: parts.append(part) return parts def _normPyStr(self, valu): if", "('duration', {}), { 'doc': 'The duration of the logon session.',", "that 
document the CVE ID.', }), )), ('it:sec:cpe', {}, (", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"language\"", "as integers, with a max width of 20 bits. The", "ATT&CK technique.', }), ('isnow', ('it:mitre:attack:technique', {}), { 'doc': 'If deprecated,", "{}), { 'doc': 'The inet:flow that matched the snort rule.'}),", "that caused the hit.'}), ('dst:port', ('inet:port', {}), { 'doc': 'The", "('guid', {}), { 'doc': 'A GUID that represents an individual", "}), ('semver:pre', ('str', {}), { 'doc': 'Semver prerelease string.', }),", "'The source IPv4 address of the flow that caused the", "{'subs': subs} class SemVer(s_types.Int): ''' Provides support for parsing a", "{ 'doc': 'An instance of a host adding a file", "('it:prod:softos', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "'doc': 'A URL linking this CWE to a full description.',", "('addresses', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True, 'split': ','}),", "{'fields': ( ('app', 'it:prod:soft'), ('perm', 'it:os:android:perm'))}), { 'doc': 'The given", "the existing file.', }), ('host', ('it:host', {}), { 'doc': 'The", "a member of this group.', }), ('posix:gid', ('int', {}), {", "('it:app:yara:rule', {}, ( ('text', ('str', {}), { 'doc': 'The YARA", "c == '\\\\': c += next(genr) if c == ':':", "{ 'deprecated': True, 'doc': 'Person who authored the software.', }),", "the software author.', }), ('author:email', ('inet:email', {}), { 'deprecated': True,", "('name', ('str', {'lower': True})))}), { 'doc': 'A signature name within", "{ 'doc': 'A Mitre ATT&CK Mitigation ID.', 'ex': 'M1036', }),", "'ex': 'cno.mitre.s0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), {", "'doc': 'The file extension of the file name (parsed from", "}), ('it:host', ('guid', {}), { 'doc': 'A GUID that represents", "network.', }), ('org', ('ou:org', {}), { 'doc': 'The org that", "of this network.', }), )), ('it:account', {}, ( ('user', 
('inet:user',", "('it:group', {}, ( ('name', ('str', {'lower': True, 'strip': True, 'onespace':", "'The path where the file was read.', }), ('path:dir', ('file:path',", "requested the URL.', }), ('host', ('it:host', {}), { 'doc': 'The", "('it:prod:softfile', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "from the CPE 2.3 string.'}), ('version', ('str', {'lower': True, 'strip':", "k, v in subs.items(): await node.set(f'semver:{k}', v) except asyncio.CancelledError: #", "deleting a registry key.', }), ('it:app:yara:rule', ('guid', {}), { 'doc':", "an imported library.', }), ), 'interfaces': ( ('it:host:activity', { 'props':", "flow that caused the hit.'}), ('src:port', ('inet:port', {}), { 'doc':", "}), ('it:exec:bind', ('guid', {}), { 'doc': 'An instance of a", "('ou:org', {}), { 'deprecated': True, 'doc': 'Organization which authored the", "{}), { 'doc': 'The time the registry was written to.',", "was loaded in the process.', }), ('loaded', ('time', {}), {", "('it:dev:mutex', {}), { 'doc': 'The mutex string.', }), )), ('it:exec:pipe',", "'The file creation time.', }), ('mtime', ('time', {}), { 'doc':", "name of the process owner.', }), ('path', ('file:path', {}), {", "{ 'doc': 'The host running the process that requested the", "Specification', 'ex': 'CWE-120', }), ('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}), { 'doc':", "{}), { 'doc': 'The file that was modified.', }), )),", "host or system.', }), ('it:sec:cve', ('str', {'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}),", "build string.', }), ('url', ('inet:url', {}), { 'doc': 'URL where", "'doc': 'The NIST CPE 2.3 string specifying this software.', }),", "that created the new file. 
Typically the same host referenced", "it's normed valu prop = node.get('vers') if not prop: return", "of the registry key, if the value is binary data.',", "file.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc': 'A SHA256 hash", "'doc': 'The process that matched the YARA rule.'}), ('time', ('time',", "True, 'doc': 'The \"edition\" field from the CPE 2.3 string.'}),", "disregarded for integer comparison purposes, as we cannot map an", "Windows Security Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa', ('it:adid', {}), {", "{ 'doc': 'The host on which the activity occurred.'}), ('time',", "was deleted.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The registry", "('update', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "{}), {'ro': True, 'doc': 'The software which can run on", "('norm', ('str', {'lower': True}), { 'doc': 'Lower case normalized version", "'doc': 'The complexity of the function.'}), ('funccalls', ('array', {'type': 'it:reveng:filefunc'}),", "key.', }), ('it:exec:reg:set', ('guid', {}), { 'doc': 'An instance of", "tactic.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", "a string.', }), ('int', ('it:dev:int', {}), { 'doc': 'The value", "concerning the function.'}), ('impcalls', ('array', {'type': 'it:reveng:impfunc'}), { 'doc': 'Calls", "'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa', ('it:adid', {}), { 'doc': 'An iOS advertising", "'The name of the domain.', }), ('desc', ('str', {}), {", "('hash:md5', ('hash:md5', {}), { 'doc': 'The MD5 password hash value.',", "the software.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted':", "('time', {}), { 'doc': 'The time the process exited.', }),", "'S0154', }), ('it:dev:str', ('str', {}), { 'doc': 'A developer-selected string.'", "activity.'}), ('thread', ('it:exec:thread', {}), { 'doc': 'The host thread which", "('function', 'it:reveng:function'))}), { 'doc': 'An instance of a 
function in", "unique command-line string.', 'ex': 'foo.exe --dostuff bar', }), ('it:exec:mutex', ('guid',", "brute force version string [%s]', prop) def getModelDefs(self): modl =", "'doc': 'The main process executing code that requested the URL.',", "}), ('host', ('it:host', {}), { 'doc': 'The host that the", "owner.', }), ('path', ('file:path', {}), { 'doc': 'The path to", "a match.'}), )), ('it:reveng:function', {}, ( ('name', ('str', {}), {", "contiguous IPv4 address range of this network.', }), ('net6', ('inet:net6',", "'doc': 'The \"target_hw\" field from the CPE 2.3 string.'}), ('other',", "prop) # Make it:dev:str from version str await node.snap.addNode('it:dev:str', prop)", "{'type': 'str', 'uniq': True, 'sorted': True}), { 'doc': 'Associated names", "{}), { 'doc': 'The path where the file was read.',", "string.' }), ('it:dev:pipe', ('str', {}), { 'doc': 'A string representing", "last known ipv4 address for the host.' }), ('latlong', ('geo:latlong',", "software.', }), )), ('it:mitre:attack:mitigation', {}, ( # TODO map to", "2.3 string specifying this software.', }), ('author', ('ps:contact', {}), {", "('version', ('it:semver', {}), { 'doc': 'The most recent version of", "main process executing code that wrote to the registry.', }),", "{}, ()), ('it:os:android:perm', {}, ()), ('it:os:android:intent', {}, ()), ('it:os:android:reqperm', {},", "system.', }), ('it:sec:cve', ('str', {'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc':", "'doc': 'The specific file containing code that created the named", "distributed by a specific software version.'}), ('it:prod:softlib', ('comp', {'fields': (", "('it:reveng:function', ('guid', {}), { 'doc': 'A function inside an executable.',", "('semver:minor', ('int', {}), { 'doc': 'Version minor number.', }), ('semver:patch',", "the registry was read.', }), ('reg', ('it:dev:regval', {}), { 'doc':", "used to calculate the password hash.', }), ('hash:md5', ('hash:md5', {}),", "key.', }), ('str', ('it:dev:str', 
{}), { 'doc': 'The value of", "process exited.', }), ('exitcode', ('int', {}), { 'doc': 'The exit", "('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The android app which", "String', }), ), 'types': ( ('it:hostname', ('str', {'strip': True, 'lower':", "'A specific version of a software product.'}), ('it:prod:softfile', ('comp', {'fields':", "ATT&CK group.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "'The duration of the logon session.', }), ('client:host', ('it:host', {}),", "ID.', 'ex': 'G0100', }), ('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}), { 'doc':", "which is listened for by the app.'}), )), ('it:os:android:ibroadcast', {},", "('it:prod:softlib', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('lib', 'it:prod:softver'))}), { 'doc':", "Enumeration Specification', 'ex': 'CWE-120', }), ('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}), {", "path of the file.'}), )), ('it:hostsoft', {}, ( ('host', ('it:host',", "engines.'}), )), ('it:app:yara:match', {}, ( ('rule', ('it:app:yara:rule', {}), { 'ro':", ")), ('it:dev:str', {}, ( ('norm', ('str', {'lower': True}), { 'doc':", "'The size of the memory map in bytes.', }), ('perms:read',", "{ 'doc': 'The inet:flow that matched the snort rule.'}), ('src',", "''' def __init__(self, modl, name, info, opts): opts['lower'] = True", "time for the process.', }), ('exited', ('time', {}), { 'doc':", "key, if the value is binary data.', }), )), ('it:prod:soft',", "present in the axon.', }), )), ('it:exec:mutex', {}, ( ('proc',", "host running the process that deleted data from the registry.", "a specific antivirus signature.' 
}), ('it:auth:passwdhash', ('guid', {}), { 'doc':", "{}), { 'doc': 'The time of the network flow that", "('it:account', ('guid', {}), { 'doc': 'A GUID that represents an", "{}), { 'doc': 'An iOS advertising identification string.'}), ('it:os:android:aaid', ('it:adid',", "('author:email', ('inet:email', {}), { 'deprecated': True, 'doc': 'Email address of", "file was written to/modified.', }), ('path', ('file:path', {}), { 'doc':", "'The host running the process that deleted the file. Typically", "}), )), ('it:log:event', {}, ( ('mesg', ('str', {}), { 'doc':", "'The host running the process that deleted data from the", "'doc': 'The host that executed the process. May be an", "'doc': 'The host running the process that deleted data from", "('semver', ('it:semver', {}), { 'doc': 'System normalized semantic version number.',", "'version': parts[5], 'update': parts[6], 'edition': parts[7], 'language': parts[8], 'sw_edition': parts[9],", "of the account.', 'ex': '1001', }), ('posix:gecos', ('int', {}), {", "used to annotate nodes included in this ATT&CK software.', 'ex':", "a subs dictionary. 
''' try: valu, info = self.core.model.type('it:semver').norm(valu) subs", "('inet:addr', {}), { 'doc': 'The destination address of the trigger.'}),", "'doc': 'The \"other\" field from the CPE 2.3 string.'}), )),", "string.'}), ('vendor', ('ou:name', {}), { 'ro': True, 'doc': 'The \"vendor\"", "from an imported library.', }), ), 'interfaces': ( ('it:host:activity', {", "rule match to a process.', }), ('it:app:snort:rule', ('guid', {}), {", "DLLs loaded by that program.', }), ('cmd', ('it:cmd', {}), {", "'types': ( ('it:hostname', ('str', {'strip': True, 'lower': True}), { 'doc':", "the mitigation.', }), )), ('it:dev:int', {}, ()), ('it:dev:pipe', {}, ()),", "{ 'doc': 'The main process executing code that requested the", "for the ATT&CK technique.', }), ('status', ('it:mitre:attack:status', {}), { 'doc':", "as a semver.') if valu > s_version.mask60: raise s_exc.BadTypeValu(valu=valu, name=self.name,", "broadcasts the given Android intent.'}), ('it:prod:softver', ('guid', {}), { 'doc':", "account.', }), ('host', ('it:host', {}), { 'doc': 'The host where", "True}), { 'doc': 'Associated names for the ATT&CK software.', }),", "'A reference URL for information about the signature.', }) )),", "map was created.', }), ('deleted', ('time', {}), { 'doc': 'The", "{ 'doc': 'The path that the library was loaded from.',", "boundary of authentication and configuration such as a windows domain.'", "True}), { 'doc': 'The primary name for the ATT&CK tactic.',", "'doc': 'The file that contains the function.'}), ('va', ('int', {}),", "opts = {'vars': {'soft': prop}} nodes = await node.snap.nodes('it:prod:soft=$soft', opts=opts)", "{ 'doc': 'A function inside an executable.', }), ('it:reveng:filefunc', ('comp',", "where a specific version of the software is available from.',", "('time', {}), { 'doc': 'The time the library was loaded.',", "('it:av:prochit', ('guid', {}), { 'doc': 'An instance of a process", "on.' 
}), ('sig:name', ('str', {'lower': True}), { 'ro': True, 'doc':", "tactic.', }), ('desc', ('str', {}), { 'doc': 'A description of", "an executable.', }), ('it:reveng:funcstr', ('comp', {'fields': (('function', 'it:reveng:function'), ('string', 'str'))}),", "time the file was read.', }), ('path', ('file:path', {}), {", "of URLs that document the CVE ID.', }), )), ('it:sec:cpe',", "calls within the scope of the function.', }), )), ('it:reveng:funcstr',", "('bool', {}), { 'doc': 'The rule enabled status to be", "YARA rule.'}), ('time', ('time', {}), { 'doc': 'The time that", "compatible with the given os software version.'}), ('it:hostsoft', ('comp', {'fields':", "'doc': 'Contact info for the author of the YARA rule.'}),", "'doc': 'The source address of flow that caused the hit.'}),", "('time', {}), { 'doc': 'The time the logon occured.', }),", "as s_types import synapse.lib.module as s_module import synapse.lib.version as s_version", ")), ('it:prod:soft', {}, ( ('name', ('str', {'lower': True, 'strip': True}),", "'doc': 'The \"target_sw\" field from the CPE 2.3 string.'}), ('target_hw',", "{}), { 'doc': 'The most recent version of the rule", "{'type': 'ou:name', 'uniq': True, 'sorted': True}), { 'doc': 'An array", "# Set vers:norm and make it's normed valu prop =", "('inet:ipv4', {}), { 'doc': 'The IPv4 where the logon originated.',", "{'ro': True, 'doc': 'The app software which broadcasts the android", "'The client port during the URL retrieval..' }), )), ('it:exec:bind',", "('cpe', ('it:sec:cpe', {}), { 'doc': 'The NIST CPE 2.3 string", "subs = {'major': major, 'minor': minor, 'patch': patch} return valu,", "the hit.'}), ('dst:ipv6', ('inet:ipv6', {}), { 'doc': 'The destination IPv4", "version instance.', }), ('software:name', ('str', {'lower': True, 'strip': True}), {", "a host deleting a file from a filesystem.', }), ('it:exec:file:read',", "{ 'doc': 'A file on a host.' 
}), ('it:exec:file:add', ('guid',", "'doc': 'The YARA rule that matched the file.'}), ('proc', ('it:exec:proc',", "info, opts) def _splitCpe23(self, text): part = '' parts =", "{}), { 'doc': 'The IPv4 address specified to bind().' }),", "that triggered an alert on a specific antivirus signature.', }),", "source address of flow that caused the hit.'}), ('src:ipv4', ('inet:ipv4',", "the network.', }), ('desc', ('str', {}), { 'doc': 'A brief", "}), ('desc:short', ('str', {'lower': True}), { 'doc': 'A short description", "'doc': 'The android intent which is listened for by the", "if subs is None: raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable to brute", "{ 'ro': True, 'doc': 'The signature that the file triggered", "identification string.'}), ('it:os:windows:sid', ('str', {'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc': 'A Microsoft", "instance of a host deleting a registry key.', }), ('it:app:yara:rule',", "'doc': 'An array of ATT&CK technique IDs used by the", "'doc': 'A signature name within the namespace of an antivirus", "('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc': 'A Mitre ATT&CK Technique ID.',", "brute force version parts out of the string') if subs:", "{}), { 'doc': 'A GUID that represents a logical network.'", "the semver properly or bruteforce parts try: valu, subs =", "{}), { 'doc': 'The GECOS field for the POSIX account.',", "{'vars': {'soft': prop}} nodes = await node.snap.nodes('it:prod:soft=$soft', opts=opts) if nodes:", "namespace of an antivirus engine name.' 
}), ('it:av:filehit', ('comp', {'fields':", "}), )), ('it:exec:file:add', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "mesg = f'CPE 2.3 string has {len(parts)} parts, expected 13.'", "'A file is distributed by a specific software version.'}), ('it:prod:softlib',", "which killed this process.', }), )), ('it:exec:thread', {}, ( ('proc',", "valu): if not valu.startswith('cpe:2.3:'): mesg = 'CPE 2.3 string is", "'doc': 'The primary name for the ATT&CK group.', }), ('names',", "True}), { 'doc': 'The primary name for the ATT&CK software.',", "file path if the mmap is a mapped view of", "thread was created.', }), ('exited', ('time', {}), { 'doc': 'The", "parts out of the string') if subs: valu = s_version.packVersion(subs.get('major'),", "the given domain.', }), )), ('it:network', {}, ( ('name', ('str',", "product : version : update : edition : language :", ")), ('it:exec:reg:get', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "\"main\" executable for the process. For example, rundll32.exe may be", "('it:app:snort:rule', ('guid', {}), { 'doc': 'A snort rule unique identifier.',", "('file:path', {}), { 'doc': 'The file path if the mmap", "a password hash.', }), ('it:exec:proc', ('guid', {}), { 'doc': 'A", "{}), { 'doc': 'The thread which created this thread.', }),", "triggered on.' 
}), ('time', ('time', {}), { 'doc': 'The time", "memory map was deleted.', }), ('path', ('file:path', {}), { 'doc':", "from the CPE 2.3 string.'}), ('edition', ('str', {'lower': True, 'strip':", "Microsoft Windows Security Identifier of the group.', }), )), ('it:logon',", "version contains a library software version.'}), ('it:prod:softos', ('comp', {'fields': (", "is a member of.', }), )), ('it:group', {}, ( ('name',", "'doc': 'An array of strings referenced within the function.', }),", "{ 'doc': 'The time that the activity started.'}), ), }),", "from the CPE 2.3 string.'}), ('product', ('str', {'lower': True, 'strip':", "'An array of URLs that document the ATT&CK technique.', }),", "version.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted': True}),", "True, 'doc': 'The software which distributes the file.'}), ('file', ('file:bytes',", "}), )), ('it:mitre:attack:software', {}, ( ('software', ('it:prod:soft', {}), { 'doc':", "< 0: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a negative integer", "{}), { 'doc': 'The function rank score used to evaluate", "status.', 'ex': 'current', }), ('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}), { 'doc':", "'Calls to imported library functions within the scope of the", "ATT&CK software to a synapse it:prod:soft.', }), ('name', ('str', {'strip':", "+= next(genr) if c == ':': parts.append(part) part = ''", "'M1036', }), ('it:mitre:attack:software', ('str', {'regex': r'^S[0-9]{4}$'}), { 'doc': 'A Mitre", "the group.', }), )), ('it:mitre:attack:tactic', {}, ( ('name', ('str', {'strip':", "names for the ATT&CK group.', }), ('desc', ('str', {}), {", "'Host with the software.'}), ('softver', ('it:prod:softver', {}), {'ro': True, 'doc':", "number of the host.', }), ('operator', ('ps:contact', {}), { 'doc':", "2.3 string.'}), ('language', ('str', {'lower': True, 'strip': True}), { 'ro':", "{}), { 'doc': 'An instance of a host writing a", "host.', }), ('it:av:sig', ('comp', 
{'fields': (('soft', 'it:prod:soft'), ('name', ('str', {'lower':", "YARA rule text.', 'disp': {'hint': 'text'}, }), ('name', ('str', {}),", "'The version of the rule at the time of match.'}),", "'doc': 'A description of the ATT&CK group.', 'disp': {'hint': 'text'},", "{}, ( ('text', ('str', {}), { 'doc': 'The YARA rule", "client port during the URL retrieval..' }), )), ('it:exec:bind', {},", "('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Group", "software.', }), ('author:org', ('ou:org', {}), { 'deprecated': True, 'doc': 'Organization", "string.'}), )), ('it:sec:cwe', {}, ( ('name', ('str', {}), { 'doc':", "'target_hw': parts[11], 'other': parts[12], } return ':'.join(parts), {'subs': subs} class", "this software version', }), ('cves', ('array', {'type': 'it:sec:cve', 'uniq': True,", "org or person who authored the software.', }), ('author:org', ('ou:org',", "( ('function', ('it:reveng:function', {}), { 'ro': True, 'doc': 'The guid", "('version', ('it:semver', {}), { 'doc': 'The current version of the", "'The signature name.', }), ('sig:soft', ('it:prod:soft', {}), { 'ro': True,", "process that created the new file. Typically the same host", "'doc': 'A function inside an executable.', }), ('it:reveng:filefunc', ('comp', {'fields':", "the function.'}), ('impcalls', ('array', {'type': 'it:reveng:impfunc'}), { 'doc': 'Calls to", "be present in the axon.', }), )), ('it:exec:mutex', {}, (", "the ATT&CK tactic.', }), ('tag', ('syn:tag', {}), { 'doc': 'The", "('ou:name', {}), { 'ro': True, 'doc': 'The \"vendor\" field from", "process that deleted the file. 
Typically the same host referenced", "'doc': 'A thread executing in a process.', }), ('it:exec:loadlib', ('guid',", "given software listens for an android intent.'}), ('it:os:android:ibroadcast', ('comp', {'fields':", "was modified.', }), )), ('it:exec:reg:get', {}, ( ('proc', ('it:exec:proc', {}),", "at runtime.', }), ('it:exec:pipe', ('guid', {}), { 'doc': 'A named", "( ('name', ('str', {'lower': True, 'strip': True}), { 'doc': 'Name", "{'ro': True, 'doc': 'The software which can run on the", "of URLs that document the ATT&CK tactic.', }), )), ('it:mitre:attack:technique',", "ATT&CK technique IDs addressed by the mitigation.', }), )), ('it:dev:int',", "array of alternate names for the ATT&CK group.', }), ('desc',", "severity.', }), ('data', ('data', {}), { 'doc': 'A raw JSON", "'doc': 'The owner of the file.', }), ('group', ('inet:user', {}),", "'ro': True, 'doc': 'The final component of the file path", "()), ('it:os:android:intent', {}, ()), ('it:os:android:reqperm', {}, ( ('app', ('it:prod:softver', {}),", "{}), { 'doc': 'The file path if the mmap is", "string.'}), ('other', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "may be considered the \"main\" executable for DLLs loaded by", "was mapped.', }), ('va', ('int', {}), { 'doc': 'The base", "authentication and configuration such as a windows domain.' 
}), ('it:account',", "'ex': 'current', }), ('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}), { 'doc': 'A", "android intent string.'}), ('it:os:android:reqperm', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('perm',", "host.'}) )), ('it:av:sig', {}, ( ('soft', ('it:prod:soft', {}), { 'ro':", "'doc': 'An array of URLs that document the ATT&CK tactic.',", "}), ('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The Microsoft Windows Security", "repr(self, valu): major, minor, patch = s_version.unpackVersion(valu) valu = s_version.fmtVersion(major,", "('exited', ('time', {}), { 'doc': 'The time the thread exited.',", "( ('soft', ('it:prod:soft', {}), { 'ro': True, 'doc': 'The anti-virus", "account', }), ('contact', ('ps:contact', {}), { 'doc': 'Additional contact information", "{ 'doc': 'A YARA rule match to a file.', }),", "\"update\" field from the CPE 2.3 string.'}), ('edition', ('str', {'lower':", "'it:prod:softver'), ('os', 'it:prod:softver'))}), { 'doc': 'The software version is known", "('guid', {}), { 'doc': 'An instance of a host reading", "'doc': 'The primary name for the ATT&CK tactic.', }), ('desc',", "def _onPropSoftverArch(self, node, oldv): # make it:dev:str for arch prop", "{ 'doc': 'A description of the software.', 'disp': {'hint': 'text'},", "valu, subs = self.bruteVersionStr(prop) await node.set('semver', valu) for k, v", "the POSIX account's home directory.\", 'ex': '/home/visi', }), ('posix:shell', ('file:path',", "value that was read.', }), )), ('it:exec:reg:set', {}, ( ('proc',", "valu = s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch')) return valu, {'subs': subs} def", "the executable of the process.', }), ('src:exe', ('file:path', {}), {", "if prop: opts = {'vars': {'soft': prop}} nodes = await", "if not valu.startswith('cpe:2.3:'): mesg = 'CPE 2.3 string is expected", "node.set('vers:norm', prop) # Make it:dev:str from version str await node.snap.addNode('it:dev:str',", "to attempt to get version 
information for. Notes: This first", "{ 'ro': True, 'doc': 'The \"language\" field from the CPE", "'doc': 'The IPv6 where the logon originated.', }), )), ('it:hosturl',", "version.', }), ('vers', ('it:dev:str', {}), { 'doc': 'Version string associated", "iOS advertising identification string.'}), ('it:os:android:aaid', ('it:adid', {}), { 'doc': 'An", "'doc': 'The current version of the rule.'}), )), ('it:app:snort:hit', {},", "'Name of the software.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq':", "}), ('desc', ('str', {}), { 'doc': 'A brief description of", "created the named pipe. Typically the same host referenced in", "URLs that document the ATT&CK tactic.', }), )), ('it:mitre:attack:technique', {},", "('it:exec:loadlib', ('guid', {}), { 'doc': 'A library load event in", "'doc': 'The host running the process that read the file.", "known to be compatible with the given os software version.'}),", "'doc': 'An array of techniques used by the software.', }),", "of strings referenced within the function.', }), )), ('it:reveng:filefunc', {},", "it exhibits interesting behavior.'}), ('complexity', ('int', {}), { 'doc': 'The", "opts) def _splitCpe23(self, text): part = '' parts = []", "account on a host or network.' }), ('it:group', ('guid', {}),", "{}), { 'doc': 'The specific file containing code that deleted", "('time', {}), { 'doc': 'The time the thread was created.',", "('str', {'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc': 'A Microsoft Windows Security Identifier.',", "the registry. 
May or may not be the same :exe", "of Input (Classic Buffer Overflow)', }), ('desc', ('str', {}), {", "'A signature name within the namespace of an antivirus engine", "guid matching the function.'}), ('file', ('file:bytes', {}), { 'ro': True,", "('names', ('array', {'type': 'ou:name', 'uniq': True, 'sorted': True}), { 'doc':", "('it:prod:softver', {}), { 'doc': 'The operating system of the host.'", "{ 'doc': 'The specific file containing code that created the", "( # TODO map to an eventual risk:mitigation ('name', ('str',", "for the ATT&CK tactic.', }), ('desc', ('str', {}), { 'doc':", "data.', }), )), ('it:prod:soft', {}, ( ('name', ('str', {'lower': True,", "logging import synapse.exc as s_exc import synapse.lib.types as s_types import", "url.', }), ('url', ('inet:url', {}), { 'ro': True, 'doc': 'URL", "the account is registered.', }), ('domain', ('it:domain', {}), { 'doc':", "('loc', ('loc', {}), { 'doc': 'The geo-political location string for", "'doc': 'A string representing a named pipe.', }), ('it:dev:mutex', ('str',", "(e.g., malware sandbox) host.', }), ('it:exec:thread', ('guid', {}), { 'doc':", "'alert'), (80, 'emerg'), ) class ItModule(s_module.CoreModule): async def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr)", "ATT&CK group.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted':", "of the host.', }), ('org', ('ou:org', {}), { 'doc': 'The", "('it:os:android:perm', {}), {'ro': True, 'doc': 'The android permission requested by", "to the executable of the process.', }), ('src:exe', ('file:path', {}),", "'doc': 'The time the library was unloaded.', }), ('path', ('file:path',", "from the registry was deleted.', }), ('reg', ('it:dev:regval', {}), {", "parts[6], 'edition': parts[7], 'language': parts[8], 'sw_edition': parts[9], 'target_sw': parts[10], 'target_hw':", "version of the rule evaluated as a match.'}), )), ('it:app:yara:procmatch',", "data from the registry. 
Typically the same host referenced in", "host.', }), )), ('it:dev:str', {}, ( ('norm', ('str', {'lower': True}),", "matched the YARA rule.'}), ('version', ('it:semver', {}), { 'doc': 'The", "'doc': 'An array of URLs that document the ATT&CK software.',", "annotate nodes included in this ATT&CK mitigation.', 'ex': 'cno.mitre.m0100', }),", "{ 'doc': 'The main process executing code that bound the", "number.', }), ('semver:minor', ('int', {}), { 'doc': 'Version minor number.',", "('it:reveng:impfunc', ('str', {'lower': 1}), { 'doc': 'A function from an", "software version.', }), ('cpe', ('it:sec:cpe', {}), { 'doc': 'The NIST", "('it:host', {}), { 'doc': 'The host running the process that", "of URLs that document the ATT&CK mitigation.', }), ('addresses', ('array',", "string.'}), ('target_sw', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "('src:proc', ('it:exec:proc', {}), { 'doc': 'The process which created the", "{}), {'ro': True, 'doc': 'The operating system which the software", "'doc': 'The size of the memory map in bytes.', }),", "{ 'doc': 'The time the registry was written to.', }),", "}), ('client:port', ('inet:port', {}), { 'doc': 'The client port during", "model of the host.', }), ('serial', ('str', {}), { 'doc':", "}), ('hash:ntlm', ('hash:ntlm', {}), { 'doc': 'The NTLM password hash", "system.', }), ('it:host', ('guid', {}), { 'doc': 'A GUID that", "{}), { 'ro': True, 'doc': 'The string that the function", "'A developer selected integer constant.', }), ('it:dev:regkey', ('str', {}), {", "'doc': 'The specific file containing code that created the mutex.", "{ 'doc': 'The host running the process that deleted the", "{}), { 'doc': 'The registry key or value that was", "True}), { 'ro': True, 'doc': 'The \"sw_edition\" field from the", "{ 'doc': 'A GUID that represents a logical network.' 
}),", "map an ATT&CK group to a synapse ou:org.', }), ('name',", "{ 'doc': 'An instance of a host binding a listening", "'An instance of a host getting a registry key.', }),", "('time', {}), { 'doc': 'The file modification time.', }), ('atime',", "{}), { 'doc': 'An instance of a host reading a", "process executing code that created the named pipe.', }), ('host',", "broadcast by the app.'}), )), ('it:prod:softver', {}, ( ('software', ('it:prod:soft',", "'doc': 'The file distributed by the software.'}), ('path', ('file:path', {}),", "Brute force the version out of a string. Args: valu", "CPE 2.3 string.'}), ('target_sw', ('str', {'lower': True, 'strip': True}), {", "if present.'}), ('time', ('time', {}), { 'doc': 'The time the", "{ 'doc': 'The time the file was deleted.', }), ('path',", "the permission.'}), ('perm', ('it:os:android:perm', {}), {'ro': True, 'doc': 'The android", "be the same :exe specified in :proc, if present.'}), ('time',", "if subs is None: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable to parse", "'notice'), (40, 'warning'), (50, 'err'), (60, 'crit'), (70, 'alert'), (80,", "'doc': 'Version patch number.', }), ('semver:pre', ('str', {}), { 'doc':", "('file:bytes', {}), { 'doc': 'The file that was read.', }),", "on a specific antivirus signature.', }), ('it:av:prochit', ('guid', {}), {", "True}), { 'ro': True, 'doc': 'The \"part\" field from the", "{}), { 'doc': 'The time the named pipe was created.',", "this field may contain the current value for the technique.',", "{ 'doc': 'A library load event in a process.', }),", "event in a process.', }), ('it:exec:mmap', ('guid', {}), { 'doc':", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"update\" field", "session.', }), ('client:host', ('it:host', {}), { 'doc': 'The host where", "}), ('place', ('geo:place', {}), { 'doc': 'The place where the", "version string into an integer to allow version ordering. 
Prerelease", "( ('host', ('it:host', {}), { 'ro': True, 'doc': 'Host serving", "a software product.'}), ('it:prod:softfile', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('file',", "which contains the signature.', }), ('name', ('str', {'lower': True}), {", "('it:fs:file', {}, ( ('host', ('it:host', {}), { 'doc': 'The host", "info = self.core.model.type('it:semver').norm(valu) subs = info.get('subs') return valu, subs except", ")), ('it:os:android:ilisten', {}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc':", "('inet:user', {}), { 'doc': 'The username associated with the account',", "('created', ('time', {}), { 'doc': 'The time the thread was", "{ 'doc': 'The primary name for the ATT&CK tactic.', }),", "{}), {'ro': True, 'doc': 'The app software which broadcasts the", "'doc': 'The CWE description field.', 'ex': 'Buffer Copy without Checking", "'URL relevant for the software.', }), ('isos', ('bool', {}), {", "'The process which contains the thread.', }), ('created', ('time', {}),", "'doc': 'The command string used to launch the process, including", "('str', {'lower': True}), { 'ro': True, 'doc': 'The signature name.',", ")), ('it:reveng:function', {}, ( ('name', ('str', {}), { 'doc': 'The", "'The current version of the rule.'}), )), ('it:app:snort:hit', {}, (", "parts[11], 'other': parts[12], } return ':'.join(parts), {'subs': subs} class SemVer(s_types.Int):", "that documents the ATT&CK tactic.', }), ('tag', ('syn:tag', {}), {", "True, 'doc': 'Web account of the software author.', }), ('author:email',", "the ATT&CK technique.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "the hit.'}), ('sensor', ('it:host', {}), { 'doc': 'The sensor host", "{ 'doc': 'Software associated with this version instance.', }), ('software:name',", "the file was read.', }), ('path:dir', ('file:path', {}), { 'ro':", "owns/operates the network.', }), ('net4', ('inet:net4', {}), { 'doc': 'The", "r'^CWE-[0-9]{1,8}$'}), { 'doc': 'NIST NVD Common Weaknesses Enumeration 
Specification', 'ex':", "software.', 'disp': {'hint': 'text'}, }), ('desc:short', ('str', {'lower': True}), {", "attempting to extract version parts out of the string. Returns:", "{ 'doc': 'The primary name for the ATT&CK software.', }),", "executed the process. May be an actual or a virtual", "intent.'}), ('it:prod:softver', ('guid', {}), { 'doc': 'A specific version of", "{ 'doc': 'The file that was modified.', }), )), ('it:exec:reg:get',", "string specifying this software version', }), ('cves', ('array', {'type': 'it:sec:cve',", "file. May or may not be the same :exe specified", ")), ('it:av:filehit', {}, ( ('file', ('file:bytes', {}), { 'ro': True,", "}), )), ('it:reveng:filefunc', {}, ( ('function', ('it:reveng:function', {}), { 'ro':", "codeblock of the function.'}), ('rank', ('int', {}), { 'doc': 'The", "vers:norm and make it's normed valu prop = node.get('vers') if", "if name is available and set it if possible prop", "{ 'deprecated': True, 'doc': 'Email address of the sofware author.',", "the string. 
Returns: int, dict: The system normalized version integer", "('str', {}), { 'doc': 'The name of the YARA rule.'}),", "prerelease string.', }), ('semver:build', ('str', {}), { 'doc': 'Semver build", "'strip': True}), { 'ro': True, 'doc': 'The \"target_hw\" field from", "flow that caused the hit.'}), ('src:ipv4', ('inet:ipv4', {}), { 'doc':", "parts[5], 'update': parts[6], 'edition': parts[7], 'language': parts[8], 'sw_edition': parts[9], 'target_sw':", "synapse it:prod:soft.', }), ('name', ('str', {'strip': True}), { 'doc': 'The", "by the mitigation.', }), )), ('it:dev:int', {}, ()), ('it:dev:pipe', {},", "was deleted.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc':", "{ 'doc': 'The software version is known to be compatible", "activity.'}), ('host', ('it:host', {}), { 'doc': 'The host on which", "}), )), ('it:exec:reg:get', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "a synapse ou:org.', }), ('name', ('ou:name', {}), { 'doc': 'The", "'doc': 'The parent directory of the file path (parsed from", "field from the CPE 2.3 string.'}), ('sw_edition', ('str', {'lower': True,", "software.'}), ('path', ('file:path', {}), { 'doc': 'The default installation path", "final component of the file path (parsed from :path).', }),", "('loc', {}), { 'doc': 'The geo-political location string for the", "'An instance of a function in an executable.', }), ('it:reveng:funcstr',", "('it:host', {}), { 'ro': True, 'doc': 'Host serving a url.',", "{ 'doc': 'The inet:addr of the server when binding the", "valu.strip() if not valu: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No text left", "('time', {}), { 'doc': 'The time the file was read.',", "technique.', }), ('parent', ('it:mitre:attack:technique', {}), { 'doc': 'The parent ATT&CK", "{'hint': 'text'}, }), ('name', ('str', {}), { 'doc': 'The name", "('src:ipv6', ('inet:ipv6', {}), { 'doc': 'The source IPv6 address of", "code that bound the listening port.', }), ('host', ('it:host', {}),", "'doc': 'A Mitre ATT&CK 
Group ID.', 'ex': 'G0100', }), ('it:mitre:attack:tactic',", "ATT&CK tactic.', }), )), ('it:mitre:attack:technique', {}, ( ('name', ('str', {'strip':", "{ 'doc': 'The log messsage text.', }), ('severity', ('int', {'enums':", "the network.', }), ('org', ('ou:org', {}), { 'doc': 'The org", "given domain.', }), )), ('it:network', {}, ( ('name', ('str', {'lower':", "{}), { 'doc': 'The host that executed the process. May", "getting a registry key.', }), ('it:exec:reg:set', ('guid', {}), { 'doc':", "the group.', }), ('host', ('it:host', {}), { 'doc': 'The host", "}), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 of the", "matched the file.'}), ('file', ('file:bytes', {}), { 'ro': True, 'doc':", "'The snort rule text.', 'disp': {'hint': 'text'}, }), ('name', ('str',", "of the logon session.', }), ('client:host', ('it:host', {}), { 'doc':", "command line parameters.', 'disp': {'hint': 'text'}, }), ('pid', ('int', {}),", "time that the activity started.'}), ), }), ), 'forms': (", "notional host.', }), ('exe', ('file:bytes', {}), { 'doc': 'The file", "permission requested by the app.'}), )), ('it:prod:softos', {}, ( ('soft',", "when this version of the software was released.', }), ('semver',", "instance of a host binding a listening port.', }), ('it:fs:file',", "'doc': 'The time the registry was written to.', }), ('reg',", "('str', {'regex': r'^M[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Mitigation ID.',", "read.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc': 'The", "time the library was unloaded.', }), ('path', ('file:path', {}), {", "True}), { 'doc': 'Normalized version of the version string.', }),", "operating system which the software can run on.'}), )), ('it:os:android:ilisten',", "the logon originated.', }), )), ('it:hosturl', {}, ( ('host', ('it:host',", "'The software version that contains the library.'}), ('lib', ('it:prod:softver', {}),", "the account.', 'ex': '1001', }), ('posix:gecos', ('int', {}), { 'doc':", "}), ('isos', ('bool', {}), { 'doc': 
'Set to True if", "a file from a filesystem.', }), ('it:exec:file:read', ('guid', {}), {", "'doc': 'True if the mmap is mapped with write permissions.',", "mutex string.', }), )), ('it:exec:pipe', {}, ( ('proc', ('it:exec:proc', {}),", "the ATT&CK software.', }), ('names', ('array', {'type': 'str', 'uniq': True,", "'file:bytes'), ('function', 'it:reveng:function'))}), { 'doc': 'An instance of a function", "{}), { 'doc': 'The file modification time.', }), ('atime', ('time',", "= s_version.parseVersionParts(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable", "('time', ('time', {}), { 'doc': 'The time that the YARA", "file.', }), ('host', ('it:host', {}), { 'doc': 'The host running", "mapped with read permissions.', }), ('perms:write', ('bool', {}), { 'doc':", "\"other\" field from the CPE 2.3 string.'}), )), ('it:sec:cwe', {},", "executable of the process.', }), ('src:exe', ('file:path', {}), { 'doc':", "}), ('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK", "logon session ended.', }), ('host', ('it:host', {}), { 'doc': 'The", "True, 'doc': 'The guid matching the function.'}), ('string', ('str', {}),", "URL that was requested.', }), ('client', ('inet:client', {}), { 'doc':", "{}), { 'ro': True, 'doc': 'The anti-virus product which contains", "2.3 string.'}), ('target_sw', ('str', {'lower': True, 'strip': True}), { 'ro':", "/ notional host.', }), ('exe', ('file:bytes', {}), { 'doc': 'The", "'doc': 'The software which distributes the file.'}), ('file', ('file:bytes', {}),", "parts. 
This normalizes a version string into an integer to", "subs.get('patch')) return valu, {'subs': subs} def _normPyInt(self, valu): if valu", "integer that increases with severity.', }), ('data', ('data', {}), {", "on this sub-technique.', }), ('tactics', ('array', {'type': 'it:mitre:attack:tactic', 'uniq': True,", "set it if possible prop = node.get('software') if prop: opts", "nodes included in this ATT&CK mitigation.', 'ex': 'cno.mitre.m0100', }), ('references',", "}), )), ('it:exec:thread', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "'doc': 'The LM password hash value.', }), ('hash:ntlm', ('hash:ntlm', {}),", "{ 'doc': 'The complexity of the function.'}), ('funccalls', ('array', {'type':", "process that wrote to the file. Typically the same host", "developer-selected string.' }), ('it:dev:pipe', ('str', {}), { 'doc': 'A string", "('name', ('str', {'strip': True}), { 'doc': 'The primary name for", "library functions within the scope of the function.', }), ('strings',", "software product.'}), ('it:prod:softfile', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('file', 'file:bytes'))}),", "True}), { 'doc': 'Observed/variant names for this software version.', }),", "{}), { 'doc': 'The time the thread exited.', }), ('exitcode',", "('it:av:sig', {}), { 'ro': True, 'doc': 'The signature that the", "('server', ('inet:server', {}), { 'doc': 'The inet:addr of the server", "the process that requested the URL. 
Typically the same host", "{'regex': r'^G[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Group ID.', 'ex':", "'it:mitre:attack:tactic', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc': 'An", "{ 'doc': 'An instance of a YARA rule match to", "process.', }), ('src:exe', ('file:path', {}), { 'doc': 'The path to", "'An array of groups that the account is a member", "{}, ( ('norm', ('str', {'lower': True}), { 'doc': 'Lower case", "that caused the hit.'}), ('sensor', ('it:host', {}), { 'doc': 'The", "'text'}, }), ('pid', ('int', {}), { 'doc': 'The process ID.',", "('it:prod:softos', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('os', 'it:prod:softver'))}), { 'doc':", "('it:reveng:function', {}, ( ('name', ('str', {}), { 'doc': 'The name", "bound the listening port. May or may not be the", "technique.', }), ('isnow', ('it:mitre:attack:technique', {}), { 'doc': 'If deprecated, this", "True}), { 'ro': True, 'doc': 'The \"other\" field from the", "return value for the thread.', }), ('src:proc', ('it:exec:proc', {}), {", "_onFormMakeDevStr(self, node): pprop = node.ndef[1] await node.snap.addNode('it:dev:str', pprop) async def", "host.' }), ('it:exec:file:add', ('guid', {}), { 'doc': 'An instance of", "'The specific file containing code that wrote to the file.", "{ 'doc': 'The authentication domain where the group is registered.',", "'doc': 'The software version that contains the library.'}), ('lib', ('it:prod:softver',", "a host reading a file from a filesystem.', }), ('it:exec:file:write',", "process which created the thread.', }), ('src:thread', ('it:exec:thread', {}), {", "}), ('loc', ('loc', {}), { 'doc': 'The geo-political location string", "'doc': 'A string representing a mutex.', }), ('it:dev:int', ('int', {}),", "that created the new file. 
May or may not be", "from :path).', }), ('file', ('file:bytes', {}), { 'doc': 'The file", "Mitre ATT&CK Technique ID.', 'ex': 'T1548', }), ('it:mitre:attack:mitigation', ('str', {'regex':", "executing code that requested the URL.', }), ('host', ('it:host', {}),", "'Software associated with this version instance.', }), ('software:name', ('str', {'lower':", "('desc:short', ('str', {'lower': True}), { 'doc': 'A short description of", "'sorted': True}), { 'doc': 'An array of alternate names for", "main process executing code that created the named pipe.', }),", "}), ('it:exec:url', ('guid', {}), { 'doc': 'An instance of a", "('it:host', {}), { 'doc': 'The host that the account logged", "r'^TA[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Tactic ID.', 'ex': 'TA0040',", "{'fields': (('file', 'file:bytes'), ('sig', 'it:av:sig'))}), { 'doc': 'A file that", "that was read.', }), )), ('it:exec:file:write', {}, ( ('proc', ('it:exec:proc',", "('it:prod:soft', {}), { 'doc': 'Used to map an ATT&CK software", "{}), { 'doc': 'A specific version of a software product.'}),", "True, 'doc': 'The android permission requested by the app.'}), )),", "('org', ('ou:org', {}), { 'doc': 'The org that operates the", "'The destination port of the flow that caused the hit.'}),", "('inet:port', {}), { 'doc': 'The bound (listening) TCP port.' 
}),", "form the semver properly or bruteforce parts try: valu, subs", "product which contains the signature.', }), ('name', ('str', {'lower': True}),", "'The time the file was written to/modified.', }), ('path', ('file:path',", "{ 'doc': 'The host thread which caused the activity.'}), ('host',", "True, 'strip': True}), { 'doc': 'The name of the software", "('it:prod:softver', {}), {'ro': True, 'doc': 'The software version that contains", "{}), { 'doc': 'The last known ipv4 address for the", "the app.'}), )), ('it:prod:softver', {}, ( ('software', ('it:prod:soft', {}), {", ")), ('it:mitre:attack:software', {}, ( ('software', ('it:prod:soft', {}), { 'doc': 'Used", "}), ('latlong', ('geo:latlong', {}), { 'doc': 'The last known location", "('it:os:ios:idfa', {}, ()), ('it:os:android:aaid', {}, ()), ('it:os:android:perm', {}, ()), ('it:os:android:intent',", "}), ('net4', ('inet:net4', {}), { 'doc': 'The optional contiguous IPv4", "directory.\", 'ex': '/home/visi', }), ('posix:shell', ('file:path', {}), { 'doc': \"The", "'System normalized semantic version number.', }), ('semver:major', ('int', {}), {", "file from a filesystem.', }), ('it:exec:file:write', ('guid', {}), { 'doc':", "{ 'doc': 'The name of the YARA rule.'}), ('author', ('ps:contact',", "requesting a URL.', }), ('it:exec:bind', ('guid', {}), { 'doc': 'An", "software which listens for the android intent.'}), ('intent', ('it:os:android:intent', {}),", "('time', {}), { 'doc': 'The time the file was written", "string for the node.', }), ('os', ('it:prod:softver', {}), { 'doc':", "matching the function.'}), ('string', ('str', {}), { 'ro': True, 'doc':", "{}), { 'doc': 'The bound (listening) TCP port.' }), )),", "primary name for the ATT&CK mitigation.', }), ('desc', ('str', {'strip':", "'doc': 'A file is distributed by a specific software version.'}),", "from the CPE 2.3 string.'}), )), ('it:sec:cwe', {}, ( ('name',", "specific file containing code that created the mutex. 
May or", "{}, ()), ('it:os:ios:idfa', {}, ()), ('it:os:android:aaid', {}, ()), ('it:os:android:perm', {},", "{'fields': (('host', 'it:host'), ('url', 'inet:url'))}), { 'doc': 'A url hosted", "number.', }), ('semver:pre', ('str', {}), { 'doc': 'Semver prerelease string.',", "process that read the registry. Typically the same host referenced", "'doc': 'The signature that the file triggered on.' }), ('sig:name',", "file containing code that read the registry. May or may", "(parsed from :path).', }), ('path:ext', ('str', {'lower': True, 'strip': True}),", "True}), { 'doc': 'Lower case normalized version of the it:dev:str.',", "'The specific file containing code that deleted data from the", "code that read the file.', }), ('host', ('it:host', {}), {", "{'subs': subs} def repr(self, valu): major, minor, patch = s_version.unpackVersion(valu)", "}), ('server:port', ('inet:port', {}), { 'doc': 'The bound (listening) TCP", "field from the CPE 2.3 string.'}), ('vendor', ('ou:name', {}), {", "normalized version of the it:dev:str.', }), )), ('it:sec:cve', {}, (", "'The source port of the flow that caused the hit.'}),", "nodes included in this ATT&CK group ID.', 'ex': 'cno.mitre.g0100', }),", "the POSIX account.', }), ('posix:home', ('file:path', {}), { 'doc': \"The", "a filesystem.', }), ('it:exec:reg:get', ('guid', {}), { 'doc': 'An instance", "{}), { 'doc': 'The log messsage text.', }), ('severity', ('int',", "the signature.' }), )), ('it:auth:passwdhash', {}, ( ('salt', ('hex', {}),", "process which caused the activity.'}), ('thread', ('it:exec:thread', {}), { 'doc':", "{ 'doc': 'The path where the file was read.', }),", "rule.'}), )), ('it:app:snort:hit', {}, ( ('rule', ('it:app:snort:rule', {}), { 'doc':", "of URLs that document the ATT&CK technique.', }), ('parent', ('it:mitre:attack:technique',", "process. 
For example, rundll32.exe may be considered the \"main\" executable", "True}), { 'doc': 'A description of the ATT&CK software.', 'disp':", "file containing code that bound the listening port. May or", "('int', {}), { 'doc': 'Version minor number.', }), ('semver:patch', ('int',", "technique.', 'ex': 'cno.mitre.t0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}),", "CPE 2.3 string specifying this software version', }), ('cves', ('array',", "}), ('it:dev:pipe', ('str', {}), { 'doc': 'A string representing a", "string into its component parts. This normalizes a version string", "to brute force version parts out of the string') if", "value of the registry key, if the value is an", "trigger.'}), ('dst:ipv4', ('inet:ipv4', {}), { 'doc': 'The destination IPv4 address", "s_version.unpackVersion(valu) valu = s_version.packVersion(major, minor, patch) subs = {'major': major,", "('it:prod:soft', {}, ( ('name', ('str', {'lower': True, 'strip': True}), {", "('time', ('time', {}), { 'doc': 'The time the data from", "raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a negative integer as a", "Windows registry key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval', ('guid', {}), {", "of the file path (parsed from :path).', }), ('file', ('file:bytes',", "Mitre ATT&CK Mitigation ID.', 'ex': 'M1036', }), ('it:mitre:attack:software', ('str', {'regex':", "identifier.', }), ('it:app:yara:match', ('comp', {'fields': (('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}), {", "description of the ATT&CK mitigation.', 'disp': {'hint': 'text'}, }), ('url',", "'ro': True, 'doc': 'The file that matched the YARA rule.'}),", "'The time that the activity started.'}), ), }), ), 'forms':", "'split': ','}), { 'doc': 'An array of ATT&CK technique IDs", "time the file was deleted.', }), ('path', ('file:path', {}), {", "('org', ('ou:org', {}), { 'doc': 'Used to map an ATT&CK", "'The complexity of the 
function.'}), ('funccalls', ('array', {'type': 'it:reveng:filefunc'}), {", "synapse tag used to annotate nodes included in this ATT&CK", "YARA rule that matched the file.'}), ('file', ('file:bytes', {}), {", "'doc': 'The specific file containing code that requested the URL.", "technique.', }), ('desc', ('str', {'strip': True}), { 'doc': 'A description", "signature.', }), ('it:av:prochit', ('guid', {}), { 'doc': 'An instance of", "{ 'doc': 'Set to True if the software is a", "part = '' continue part += c except StopIteration: parts.append(part)", "'An array of URLs that document the ATT&CK software.', }),", "\"language\" field from the CPE 2.3 string.'}), ('sw_edition', ('str', {'lower':", "make it's normed valu prop = node.get('vers') if not prop:", "'A URL linking this CVE to a full description.', }),", "process triggering an alert on a specific antivirus signature.' }),", "'doc': 'A software version contains a library software version.'}), ('it:prod:softos',", "{}), { 'doc': 'The Windows registry key.', }), ('str', ('it:dev:str',", "}), ('it:dev:str', ('str', {}), { 'doc': 'A developer-selected string.' 
}),", "True if the software is a library.'}), )), ('it:adid', {},", "the file path (parsed from :path).', }), ('file', ('file:bytes', {}),", "raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a integer larger than 1152921504606846975", "of.', }), ('ipv4', ('inet:ipv4', {}), { 'doc': 'The last known", "{ 'doc': 'The (optional) hex encoded salt value used to", "'doc': 'The IPv6 of the client during the URL retrieval..'", "('hash:sha512', {}), { 'doc': 'The SHA512 password hash value.', }),", "True: c = next(genr) if c == '\\\\': c +=", "file that was modified.', }), )), ('it:exec:reg:get', {}, ( ('proc',", "the rule evaluated as a match.'}), )), ('it:app:yara:procmatch', {}, (", "('geo:latlong', {}), { 'doc': 'The last known location for the", "to see if name is available and set it if", "('function', ('it:reveng:function', {}), { 'ro': True, 'doc': 'The guid matching", "library.'}), )), ('it:adid', {}, ()), ('it:os:ios:idfa', {}, ()), ('it:os:android:aaid', {},", "name=self.name, mesg='Unable to parse string as a semver.') valu =", "instance of a host reading a file from a filesystem.',", "_onFormItDevStr(self, node): await node.set('norm', node.ndef[1]) async def _onFormMakeDevStr(self, node): pprop", "out of the string. 
Returns: int, dict: The system normalized", "contains the thread.', }), ('created', ('time', {}), { 'doc': 'The", "('it:cmd', {}), { 'doc': 'The command string used to launch", "requested.', }), ('url', ('inet:url', {}), { 'doc': 'The URL that", "'The string that the function references.'}), )), ('it:reveng:impfunc', {}, ()),", "code that deleted the file.', }), ('host', ('it:host', {}), {", "if nodes: name = nodes[0].get('name') if name: await node.set('software:name', name)", "possible prop = node.get('software') if prop: opts = {'vars': {'soft':", "('it:reveng:impfunc', {}, ()), ), } name = 'it' return ((name,", "file was deleted.', }), ('path', ('file:path', {}), { 'doc': 'The", "'doc': 'The start time for the process.', }), ('exited', ('time',", "that was deleted.', }), )), ('it:exec:file:read', {}, ( ('proc', ('it:exec:proc',", "of the flow that caused the hit.'}), ('src:ipv6', ('inet:ipv6', {}),", "'NIST NVD Common Weaknesses Enumeration Specification', 'ex': 'CWE-120', }), ('it:mitre:attack:status',", "{'ro': True, 'doc': 'The android app which requests the permission.'}),", "{}), { 'doc': 'The path for the file.', }), ('path:dir',", "mapped view of a file.', }), ('hash:sha256', ('hash:sha256', {}), {", "which created this thread.', }), )), ('it:exec:loadlib', {}, ( ('proc',", "created by a process at runtime.', }), ('it:exec:url', ('guid', {}),", "'doc': 'The time the registry was read.', }), ('reg', ('it:dev:regval',", "that deleted the file. 
May or may not be the", "{'type': 'it:reveng:filefunc'}), { 'doc': 'Other function calls within the scope", "existing file.', }), ('host', ('it:host', {}), { 'doc': 'The host", "may not be the same :exe specified in :proc, if", "this version instance.', }), ('software:name', ('str', {'lower': True, 'strip': True}),", "it:dev:str from version str await node.snap.addNode('it:dev:str', prop) # form the", "was written to.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The", "('it:host', {}, ( ('name', ('it:hostname', {}), { 'doc': 'The name", "('comp', {'fields': ( ('soft', 'it:prod:softver'), ('os', 'it:prod:softver'))}), { 'doc': 'The", "':'.join(parts), {'subs': subs} class SemVer(s_types.Int): ''' Provides support for parsing", "ATT&CK technique.', }), ('status', ('it:mitre:attack:status', {}), { 'doc': 'The status", "path where the file was created.', }), ('path:dir', ('file:path', {}),", "{'regex': r'^TA[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Tactic ID.', 'ex':", "the CPE 2.3 string.'}), ('language', ('str', {'lower': True, 'strip': True}),", "}), ('perms:read', ('bool', {}), { 'doc': 'True if the mmap", "modification time.', }), ('atime', ('time', {}), { 'doc': 'The file", "def getModelDefs(self): modl = { 'ctors': ( ('it:semver', 'synapse.models.infotech.SemVer', {},", "'doc': 'An instance of a host adding a file to", "of ATT&CK technique IDs addressed by the mitigation.', }), )),", "}), ('user', ('inet:user', {}), { 'doc': 'The owner of the", ")), ('it:fs:file', {}, ( ('host', ('it:host', {}), { 'doc': 'The", "('app', 'it:prod:soft'), ('intent', 'it:os:android:intent'))}), { 'doc': 'The given software listens", "'doc': 'The main process executing code that created the new", "string.', }), )), ('it:exec:url', {}, ( ('proc', ('it:exec:proc', {}), {", "'A developer-selected string.' 
}), ('it:dev:pipe', ('str', {}), { 'doc': 'A", "await node.set('norm', node.ndef[1]) async def _onFormMakeDevStr(self, node): pprop = node.ndef[1]", "'doc': 'The authentication domain where the account is registered.', }),", "file.', }), ('group', ('inet:user', {}), { 'doc': 'The group owner", "('inet:ipv6', {}), { 'doc': 'The source IPv6 address of the", "author of the YARA rule.'}), ('version', ('it:semver', {}), { 'doc':", "('ou:name', {}), { 'doc': 'The primary name for the ATT&CK", "prop = node.get('software') if prop: opts = {'vars': {'soft': prop}}", "{ 'doc': 'A YARA rule unique identifier.', }), ('it:app:yara:match', ('comp',", "('it:domain', {}), { 'doc': 'The authentication domain that the host", "that caused the hit.'}), ('src:ipv4', ('inet:ipv4', {}), { 'doc': 'The", "{}), { 'doc': 'The serial number of the host.', }),", "version.'}), ('it:prod:softos', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('os', 'it:prod:softver'))}), {", "name (parsed from :path).', }), ('path:base', ('file:base', {}), { 'ro':", "retrieval..' 
}), )), ('it:exec:bind', {}, ( ('proc', ('it:exec:proc', {}), {", "parts[2], 'vendor': parts[3], 'product': parts[4], 'version': parts[5], 'update': parts[6], 'edition':", "snort rule unique identifier.', }), ('it:app:snort:hit', ('guid', {}), { 'doc':", "('it:hostname', {}, ()), ('it:host', {}, ( ('name', ('it:hostname', {}), {", "('cmd', ('it:cmd', {}), { 'doc': 'The command string used to", "bytes.', }), ('perms:read', ('bool', {}), { 'doc': 'True if the", "is expected to start with \"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) text,", "NVD Common Weaknesses Enumeration Specification', 'ex': 'CWE-120', }), ('it:mitre:attack:status', ('str',", "('host', ('it:host', {}), { 'ro': True, 'doc': 'Host serving a", "{ 'doc': 'The thread which created this thread.', }), )),", "levels are represented as integers, with a max width of", "c = next(genr) if c == '\\\\': c += next(genr)", "'doc': 'A GUID that represents a logical network.' }), ('it:domain',", "{'lower': True, 'strip': True}), { 'doc': 'The name of the", "{ 'doc': 'The exit code or return value for the", "def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def", "containing code that requested the URL. May or may not", "integer and a subs dictionary. 
''' try: valu, info =", "{ 'doc': 'The file that was deleted.', }), )), ('it:exec:file:read',", "'it:os:android:perm'))}), { 'doc': 'The given software requests the android permission.'}),", "('time', ('time', {}), { 'doc': 'The time the registry was", "the host.', }), ('ctime', ('time', {}), { 'doc': 'The file", "file extension of the file name (parsed from :path).', }),", "'doc': 'The time the library was loaded.', }), ('unloaded', ('time',", "('time', ('time', {}), { 'doc': 'The time the URL was", "into an integer to allow version ordering. Prerelease information is", "{ 'doc': 'A description of the ATT&CK technique.', 'disp': {'hint':", "{}), { 'doc': 'The URL that was requested.', }), ('client',", "JSON record of the log event.', }), )), ('it:domain', {},", "file to a filesystem.', }), ('it:exec:reg:get', ('guid', {}), { 'doc':", "arbitrary pre-release version into a integer value Major, minor and", "'An instance of a password hash.', }), ('it:exec:proc', ('guid', {}),", "}), ('hash:sha1', ('hash:sha1', {}), { 'doc': 'The SHA1 password hash", "valu < 0: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a negative", "{}), { 'doc': 'The time the URL was requested.', }),", "{ 'doc': 'Semver build string.', }), ('url', ('inet:url', {}), {", "{}), { 'ro': True, 'doc': 'The YARA rule that matched", "which authored the software.', }), ('author:acct', ('inet:web:acct', {}), { 'deprecated':", "}), ('group', ('inet:user', {}), { 'doc': 'The group owner of", "scope of the function.', }), ('strings', ('array', {'type': 'it:dev:str', 'uniq':", "('int', {}), { 'doc': 'The base memory address where the", "}), ('host', ('it:host', {}), { 'doc': 'The host where the", "('str', {}), { 'doc': 'A Windows registry key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run',", "'ro': True, 'doc': 'The YARA rule that matched the file.'}),", "'The URL that documents the ATT&CK tactic.', }), ('tag', ('syn:tag',", "process where the 
library was loaded.', }), ('va', ('int', {}),", "associated with the account', }), ('contact', ('ps:contact', {}), { 'doc':", "('version', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", ")), ('it:logon', {}, ( ('time', ('time', {}), { 'doc': 'The", "s_types import synapse.lib.module as s_module import synapse.lib.version as s_version logger", "{}), { 'doc': 'A Windows registry key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }),", "}), ('it:prod:soft', ('guid', {}), { 'doc': 'A arbitrary, unversioned software", "'doc': 'The primary name for the ATT&CK mitigation.', }), ('desc',", "to the executable which started the process.', }), ('src:proc', ('it:exec:proc',", "can run on.'}), )), ('it:os:android:ilisten', {}, ( ('app', ('it:prod:softver', {}),", "}), ('groups', ('array', {'type': 'it:group'}), { 'doc': 'Groups that are", "a integer larger than 1152921504606846975 as a semver.') major, minor,", "the CPE 2.3 string.'}), ('product', ('str', {'lower': True, 'strip': True}),", "class SemVer(s_types.Int): ''' Provides support for parsing a semantic version", "created by a process at runtime.', }), ('it:exec:pipe', ('guid', {}),", "deleted the file. Typically the same host referenced in :proc,", "'doc': 'The time the file was read.', }), ('path', ('file:path',", "process owner.', }), ('path', ('file:path', {}), { 'doc': 'The path", "'doc': 'The URL that documents the ATT&CK technique.', }), ('tag',", "'ro': True, 'doc': 'The string that the function references.'}), )),", "with severity.', }), ('data', ('data', {}), { 'doc': 'A raw", "('file:path', {}), { 'doc': 'The path where the file was", "( ('soft', 'it:prod:softver'), ('file', 'file:bytes'))}), { 'doc': 'A file is", "}), ('vers', ('it:dev:str', {}), { 'doc': 'Version string associated with", "Identifier of the group.', }), )), ('it:logon', {}, ( ('time',", "'doc': 'The IPv4 address specified to bind().' 
}), ('server:ipv6', ('inet:ipv6',", "_normPyStr(self, valu): valu = valu.strip() if not valu: raise s_exc.BadTypeValu(valu=valu,", "to.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The registry key", "that was deleted.', }), )), ('it:app:snort:rule', {}, ( ('text', ('str',", "no cover raise except Exception: logger.exception('Failed to brute force version", "'An array of ATT&CK tactics that include this technique.', }),", "description of the ATT&CK group.', 'disp': {'hint': 'text'}, }), ('url',", "the group.', }), )), ('it:logon', {}, ( ('time', ('time', {}),", "Mitre ATT&CK Tactic ID.', 'ex': 'TA0040', }), ('it:mitre:attack:technique', ('str', {'regex':", "'doc': 'The host containing the file.', }), ('path', ('file:path', {}),", "CPE 2.3 string.'}), ('product', ('str', {'lower': True, 'strip': True}), {", "}), )), ('it:exec:reg:set', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "this CVE to a full description.', }), ('references', ('array', {'type':", "force version parts out of the string') if subs: valu", "}), ('size', ('int', {}), { 'doc': 'The size of the", "runtime.', }), ('it:exec:pipe', ('guid', {}), { 'doc': 'A named pipe", "'An array of URLs that document the CVE ID.', }),", "name=self.name, mesg='No text left after stripping whitespace') subs = s_version.parseSemver(valu)", "node): await node.set('norm', node.ndef[1]) async def _onFormMakeDevStr(self, node): pprop =", "('ou:org', {}), { 'doc': 'Used to map an ATT&CK group", "example, rundll32.exe may be considered the \"main\" executable for DLLs", "the POSIX account's default shell.\", 'ex': '/bin/bash', }), ('windows:sid', ('it:os:windows:sid',", "{ 'doc': 'Version minor number.', }), ('semver:patch', ('int', {}), {", "('it:app:snort:hit', {}, ( ('rule', ('it:app:snort:rule', {}), { 'doc': 'The snort", "'The name of the host or system.', }), ('desc', ('str',", "that document the ATT&CK software.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique',", "('it:app:yara:rule', {}), { 
'doc': 'The YARA rule that matched the", "node.get('arch') if prop: await node.snap.addNode('it:dev:str', prop) async def _onPropSoftverVers(self, node,", "signature.', }), )), ('it:av:prochit', {}, ( ('proc', ('it:exec:proc', {}), {", "hit.'}), ('dst:port', ('inet:port', {}), { 'doc': 'The destination port of", "{ 'doc': 'A URL linking this CVE to a full", "a full description.', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}),", "('guid', {}), { 'doc': 'A thread executing in a process.',", "('file', ('file:bytes', {}), { 'doc': 'The library file that was", "mesg=mesg) subs = { 'part': parts[2], 'vendor': parts[3], 'product': parts[4],", "{ 'doc': 'An array of techniques used by the software.',", "{}), { 'doc': 'The named pipe string.', }), )), ('it:exec:url',", "name, info, opts) def _splitCpe23(self, text): part = '' parts", "width of 20 bits. The comparable integer value representing the", "the flow that caused the hit.'}), ('dst', ('inet:addr', {}), {", "the port was bound.', }), ('server', ('inet:server', {}), { 'doc':", "}), ('mtime', ('time', {}), { 'doc': 'The file modification time.',", "field from the CPE 2.3 string.'}), ('update', ('str', {'lower': True,", "{}), { 'doc': 'A GUID that represents a group on", "'A string representing a named pipe.', }), ('it:dev:mutex', ('str', {}),", "read the registry.', }), ('host', ('it:host', {}), { 'doc': 'The", "('guid', {}), { 'doc': 'An instance of a YARA rule", "rule.'}), ('time', ('time', {}), { 'doc': 'The time that the", ")), ('it:reveng:funcstr', {}, ( ('function', ('it:reveng:function', {}), { 'ro': True,", "a host or system.', }), ('it:sec:cve', ('str', {'lower': True, 'regex':", "is an operating system.'}), ('islib', ('bool', {}), { 'doc': 'Set", "registry key.', }), ('it:app:yara:rule', ('guid', {}), { 'doc': 'A YARA", "primary name for the ATT&CK tactic.', }), ('desc', ('str', {}),", "normalized version integer and a subs dictionary. 
''' try: valu,", "eventual risk:mitigation ('name', ('str', {'strip': True}), { 'doc': 'The primary", "of the rule.'}), ('enabled', ('bool', {}), { 'doc': 'The rule", "destination address of the trigger.'}), ('dst:ipv4', ('inet:ipv4', {}), { 'doc':", "messsage text.', }), ('severity', ('int', {'enums': loglevels}), { 'doc': 'A", "'An instance of a process triggering an alert on a", "the ATT&CK tactic.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "('time', ('time', {}), { 'doc': 'The time of the network", "2.3 string.'}), ('target_hw', ('str', {'lower': True, 'strip': True}), { 'ro':", "flow that caused the hit.'}), ('time', ('time', {}), { 'doc':", "( ('exe', ('file:bytes', {}), { 'doc': 'The executable file which", "was loaded from.', }), ('file', ('file:bytes', {}), { 'doc': 'The", "process that created the named pipe. Typically the same host", "{}), { 'doc': 'The snort rule that matched the file.'}),", "('guid', {}), { 'doc': 'A GUID representing an individual log", "sandbox) host.', }), ('it:exec:thread', ('guid', {}), { 'doc': 'A thread", "('src:proc', ('it:exec:proc', {}), { 'doc': 'An external process which created", "def _onFormItDevStr(self, node): await node.set('norm', node.ndef[1]) async def _onFormMakeDevStr(self, node):", ")), ('it:exec:url', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "('str', {}), { 'doc': 'Notes concerning the function.'}), ('impcalls', ('array',", "'current', }), ('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}), { 'doc': 'A Mitre", "('it:os:android:intent', {}), {'ro': True, 'doc': 'The android intent which is", "text, info = s_types.Str._normPyStr(self, valu) parts = self._splitCpe23(text) if len(parts)", "permission.'}), ('it:os:android:ilisten', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent'))}), {", "the new file. 
Typically the same host referenced in :proc,", "{ 'doc': 'The LM password hash value.', }), ('hash:ntlm', ('hash:ntlm',", "'The specific file containing code that read the file. May", "status to be used for YARA evaluation engines.'}), )), ('it:app:yara:match',", "iter(text) try: while True: c = next(genr) if c ==", "of the function.', }), ('strings', ('array', {'type': 'it:dev:str', 'uniq': True}),", "{}), { 'doc': 'The mutex string.', }), )), ('it:exec:pipe', {},", "to allow version ordering. Prerelease information is disregarded for integer", ")), ('it:hosturl', {}, ( ('host', ('it:host', {}), { 'ro': True,", "the software.', }), )), ('it:mitre:attack:mitigation', {}, ( # TODO map", "'The host running the process that created the mutex. Typically", "'The android intent which is listened for by the app.'}),", "'A url hosted on or served by a host or", "version that contains the library.'}), ('lib', ('it:prod:softver', {}), {'ro': True,", "the file.'}), ('file', ('file:bytes', {}), {'ro': True, 'doc': 'The file", "{}), { 'doc': 'The time the file was read.', }),", ")), ('it:app:yara:procmatch', {}, ( ('rule', ('it:app:yara:rule', {}), { 'doc': 'The", "('time', {}), { 'doc': 'The time that the activity started.'}),", "{ 'doc': 'The command string used to launch the process,", "full description.', }), ('parents', ('array', {'type': 'it:sec:cwe', 'uniq': True, 'sorted':", "nodes included in this ATT&CK tactic.', 'ex': 'cno.mitre.ta0100', }), ('references',", "{}, ()), ('it:os:android:reqperm', {}, ( ('app', ('it:prod:softver', {}), {'ro': True,", "'Observed/variant names for this software version.', }), ('cpe', ('it:sec:cpe', {}),", "'ro': True, 'doc': 'The \"sw_edition\" field from the CPE 2.3", "{}, ( ('rule', ('it:app:yara:rule', {}), { 'doc': 'The YARA rule", "the registry. 
Typically the same host referenced in :proc, if", "name for the ATT&CK mitigation.', }), ('desc', ('str', {'strip': True}),", "technique.', }), ('status', ('it:mitre:attack:status', {}), { 'doc': 'The status of", "the flow that caused the hit.'}), ('time', ('time', {}), {", "('guid', {}), { 'doc': 'A file on a host.' }),", "'ex': 'CWE-120', }), ('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}), { 'doc': 'A", "('inet:user', {}), { 'doc': 'The owner of the file.', }),", "('file:path', {}), { 'doc': 'The path that the library was", "Android intent.'}), ('it:prod:softver', ('guid', {}), { 'doc': 'A specific version", "('perm', 'it:os:android:perm'))}), { 'doc': 'The given software requests the android", "running the process that deleted the file. Typically the same", "'doc': 'A list of CVEs that apply to this software", "'A GUID representing an individual log event.', 'interfaces': ('it:host:activity',), }),", "{}), { 'doc': 'The (optional) hex encoded salt value used", "('file:bytes', {}), { 'doc': 'The file that was created.', }),", "'ex': 'M1036', }), ('it:mitre:attack:software', ('str', {'regex': r'^S[0-9]{4}$'}), { 'doc': 'A", "}), ('author', ('ps:contact', {}), { 'doc': 'The contact information of", "{ 'doc': 'A NIST CPE 2.3 Formatted String', }), ),", "'The main process executing code that read the registry.', }),", "a URL.', }), ('it:exec:bind', ('guid', {}), { 'doc': 'An instance", "subs.get('minor', 0), subs.get('patch', 0)) return valu, subs async def _onFormItDevStr(self,", "'doc': 'An android advertising identification string.'}), ('it:os:android:perm', ('str', {}), {", "of groups that the account is a member of.', }),", "named pipe.', }), ('it:dev:mutex', ('str', {}), { 'doc': 'A string", "'doc': 'The time the thread exited.', }), ('exitcode', ('int', {}),", "'strip': True}), { 'ro': True, 'doc': 'The \"target_sw\" field from", "ATT&CK Technique ID.', 'ex': 'T1548', }), ('it:mitre:attack:mitigation', ('str', {'regex': 
r'^M[0-9]{4}$'}),", "CPE 2.3 string.'}), ('version', ('str', {'lower': True, 'strip': True}), {", "base memory address where the library was loaded in the", "{ 'doc': 'A Mitre ATT&CK Technique ID.', 'ex': 'T1548', }),", "{'ro': True, 'doc': 'The software which distributes the file.'}), ('file',", "'doc': 'The \"language\" field from the CPE 2.3 string.'}), ('sw_edition',", "name.', }), ('sig:soft', ('it:prod:soft', {}), { 'ro': True, 'doc': 'The", "tag used to annotate nodes included in this ATT&CK group", "('isnow', ('it:mitre:attack:technique', {}), { 'doc': 'If deprecated, this field may", "'doc': 'A file that triggered an alert on a specific", "URL retrieval..' }), )), ('it:exec:bind', {}, ( ('proc', ('it:exec:proc', {}),", "( ('rule', ('it:app:snort:rule', {}), { 'doc': 'The snort rule that", "the process to the rule.'}), ('version', ('it:semver', {}), { 'doc':", "}), ('host', ('it:host', {}), { 'doc': 'The host running the", "ATT&CK software.', 'ex': 'cno.mitre.s0100', }), ('references', ('array', {'type': 'inet:url', 'uniq':", "for the process.', }), ('user', ('inet:user', {}), { 'doc': 'The", "'ro': True, 'doc': 'URL available on the host.', }), )),", "'doc': 'Additional contact information associated with this account.', }), ('host',", "version part extraction by noming through the string subs =", "code that requested the URL.', }), ('host', ('it:host', {}), {", "('it:dev:str', {}, ( ('norm', ('str', {'lower': True}), { 'doc': 'Lower", "description of the signature.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "score used to evaluate if it exhibits interesting behavior.'}), ('complexity',", "'Contact info for the author of the YARA rule.'}), ('version',", "for the process.', }), ('exited', ('time', {}), { 'doc': 'The", "CVE ID.', }), )), ('it:sec:cpe', {}, ( ('part', ('str', {'lower':", "'doc': 'The main process executing code that read the registry.',", "that created the mutex.', }), ('host', ('it:host', {}), { 'doc':", "to a full description.', 
}), ('parents', ('array', {'type': 'it:sec:cwe', 'uniq':", "the file triggered on.' }), ('time', ('time', {}), { 'doc':", "True}), { 'doc': 'The name of the software at a", "semver.') valu = s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch')) return valu, {'subs': subs}", "current version of the rule.'}), )), ('it:app:snort:hit', {}, ( ('rule',", "import logging import synapse.exc as s_exc import synapse.lib.types as s_types", "{'fields': (('host', 'it:host'), ('softver', 'it:prod:softver'))}), { 'doc': 'A version of", "a host requesting a URL.', }), ('it:exec:bind', ('guid', {}), {", "location for the host.' }), ('place', ('geo:place', {}), { 'doc':", "'doc': 'The file considered the \"main\" executable for the process.", "an antivirus engine name.' }), ('it:av:filehit', ('comp', {'fields': (('file', 'file:bytes'),", "('it:dev:regval', ('guid', {}), { 'doc': 'A Windows registry key/value pair.',", "'A Mitre ATT&CK Technique ID.', 'ex': 'T1548', }), ('it:mitre:attack:mitigation', ('str',", "address of flow that caused the hit.'}), ('src:ipv4', ('inet:ipv4', {}),", "the client during the URL retrieval.' 
}), ('client:ipv4', ('inet:ipv4', {}),", "{}), { 'doc': 'The time the registry was read.', }),", "{ 'doc': 'The function rank score used to evaluate if", "Group ID.', 'ex': 'G0100', }), ('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}), {", "part extraction by noming through the string subs = s_version.parseVersionParts(valu)", "}), ('hash:sha512', ('hash:sha512', {}), { 'doc': 'The SHA512 password hash", "('it:group', ('guid', {}), { 'doc': 'A GUID that represents a", "'A description of the ATT&CK group.', 'disp': {'hint': 'text'}, }),", "'doc': 'The named pipe string.', }), )), ('it:exec:url', {}, (", "time the registry was written to.', }), ('reg', ('it:dev:regval', {}),", "}), ('unloaded', ('time', {}), { 'doc': 'The time the library", "'doc': 'The name of the network.', }), ('desc', ('str', {}),", "{ 'doc': 'The time that the YARA engine matched the", "to the rule.'}), ('version', ('it:semver', {}), { 'doc': 'The most", "(CVE) number.', 'ex': 'cve-2012-0158' }), ('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), {", "()), ('it:os:ios:idfa', {}, ()), ('it:os:android:aaid', {}, ()), ('it:os:android:perm', {}, ()),", "to a filesystem.', }), ('it:exec:reg:get', ('guid', {}), { 'doc': 'An", "{}), { 'doc': 'An instance of a process triggering an", "the process.', }), ('src:proc', ('it:exec:proc', {}), { 'doc': 'The process", "that bound the listening port.', }), ('host', ('it:host', {}), {", "the current value for the technique.', }), ('desc', ('str', {'strip':", "file distributed by the software.'}), ('path', ('file:path', {}), { 'doc':", "('name', ('it:dev:mutex', {}), { 'doc': 'The mutex string.', }), )),", "{'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent') )}), { 'doc': 'The", "'The URL that was requested.', }), ('client', ('inet:client', {}), {", "True, 'doc': 'The anti-virus product which contains the signature.', }),", "('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent') )}), { 'doc':", 
"'doc': 'The anti-virus product which contains the signature.', }), ('name',", "('str', {'lower': True}), { 'ro': True, 'doc': 'The signature name.'", "Set vers:norm and make it's normed valu prop = node.get('vers')", "'The credentials that were used for the logon.', }), ('duration',", "the file.', }), ('path', ('file:path', {}), { 'doc': 'The path", "IPv6 address range of this network.', }), )), ('it:account', {},", "'A description of the ATT&CK tactic.', 'disp': {'hint': 'text'}, }),", "{}), { 'doc': 'The specific file containing code that bound", "current value for the technique.', }), ('desc', ('str', {'strip': True}),", "True, 'lower': True}), { 'doc': 'The name of a host", "('it:dev:str', {}), { 'doc': 'The value of the registry key,", "'doc': 'The main process executing code that created the named", "functions within the scope of the function.', }), ('strings', ('array',", "'interfaces': ('it:host:activity',), }), ('it:network', ('guid', {}), { 'doc': 'A GUID", "{}, ()), ('it:host', {}, ( ('name', ('it:hostname', {}), { 'doc':", "tag used to annotate nodes included in this ATT&CK tactic.',", "file representing the value of the registry key, if the", "that caused the hit.'}), ('time', ('time', {}), { 'doc': 'The", "the executable which started the process.', }), ('src:proc', ('it:exec:proc', {}),", "subs.items(): await node.set(f'semver:{k}', v) except asyncio.CancelledError: # pragma: no cover", "at the time of match.'}), )), ('it:app:yara:rule', {}, ( ('text',", "that were used for the logon.', }), ('duration', ('duration', {}),", "pprop) async def _onPropSoftverSoft(self, node, oldv): # Check to see", "('hash:sha1', {}), { 'doc': 'The SHA1 password hash value.', }),", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"target_sw\" field", "{}), { 'ro': True, 'doc': 'The parent directory of the", "'doc': 'The Microsoft Windows Security Identifier of the group.', }),", "{}), { 'doc': 'The org that owns/operates the network.', }),", "'True if the mmap is 
mapped with execute permissions.', }),", "file containing code that deleted the file. May or may", "to bind().' }), ('server:port', ('inet:port', {}), { 'doc': 'The bound", "created the new file. Typically the same host referenced in", "caused the activity.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The host", "file modification time.', }), ('atime', ('time', {}), { 'doc': 'The", "('it:app:yara:procmatch', {}, ( ('rule', ('it:app:yara:rule', {}), { 'doc': 'The YARA", "'The library software version.'}), )), ('it:prod:softfile', {}, ( ('soft', ('it:prod:softver',", "addressed by the mitigation.', }), )), ('it:dev:int', {}, ()), ('it:dev:pipe',", "port.' }), ('server:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 address", "was deleted.', }), ('path', ('file:path', {}), { 'doc': 'The path", "code that wrote to the registry.', }), ('host', ('it:host', {}),", "0), subs.get('patch', 0)) return valu, subs async def _onFormItDevStr(self, node):", "containing code that deleted the file. May or may not", "not valu: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No text left after stripping", "created the named pipe.', }), ('host', ('it:host', {}), { 'doc':", "}), ('it:av:filehit', ('comp', {'fields': (('file', 'file:bytes'), ('sig', 'it:av:sig'))}), { 'doc':", "'Person who authored the software.', }), ('url', ('inet:url', {}), {", "('it:network', ('guid', {}), { 'doc': 'A GUID that represents a", "'A string representing a mutex.', }), ('it:dev:int', ('int', {}), {", "}), ('it:exec:reg:del', ('guid', {}), { 'doc': 'An instance of a", "was created.', }), ('name', ('it:dev:pipe', {}), { 'doc': 'The named", "'A file that triggered an alert on a specific antivirus", "that requested the URL. 
Typically the same host referenced in", "library was loaded from.', }), ('file', ('file:bytes', {}), { 'doc':", "intent string.'}), ('it:os:android:reqperm', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('perm', 'it:os:android:perm'))}),", "('software', ('it:prod:soft', {}), { 'doc': 'Software associated with this version", "('it:semver', {}), { 'doc': 'System normalized semantic version number.', }),", "self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self, valu):", "}), ('it:account', ('guid', {}), { 'doc': 'A GUID that represents", "the CPE 2.3 string.'}), ('edition', ('str', {'lower': True, 'strip': True}),", "'doc': 'The file path if the mmap is a mapped", "document the ATT&CK tactic.', }), )), ('it:mitre:attack:technique', {}, ( ('name',", "}), ('file', ('file:bytes', {}), { 'doc': 'The library file that", "('str', {'lower': True, 'strip': True, 'onespace': True}), { 'doc': 'The", "= logging.getLogger(__name__) class Cpe23Str(s_types.Str): ''' CPE 2.3 Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf", "'The main process executing code that created the mutex.', }),", "('it:exec:file:write', ('guid', {}), { 'doc': 'An instance of a host", "}), )), ('it:network', {}, ( ('name', ('str', {'lower': True, 'strip':", "'The main process executing code that created the named pipe.',", "'The time the thread exited.', }), ('exitcode', ('int', {}), {", "2.3 string.'}), ('sw_edition', ('str', {'lower': True, 'strip': True}), { 'ro':", "source IPv6 address of the flow that caused the hit.'}),", "description of the CVE 
vulnerability.', 'disp': {'hint': 'text'}, }), ('url',", "version out of a string. Args: valu (str): String to", "'doc': 'The given software requests the android permission.'}), ('it:os:android:ilisten', ('comp',", "'Web account of the software author.', }), ('author:email', ('inet:email', {}),", "a string. Args: valu (str): String to attempt to get", "{}), { 'doc': 'The file that was deleted.', }), )),", ":exe specified in :proc, if present.', }), ('time', ('time', {}),", "string [%s]', prop) def getModelDefs(self): modl = { 'ctors': (", "{}), { 'doc': 'An instance of a snort rule hit.',", "ATT&CK software.', }), ('desc', ('str', {'strip': True}), { 'doc': 'A", "executing code that created the named pipe.', }), ('host', ('it:host',", "deleted.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The registry key", "'The YARA rule text.', 'disp': {'hint': 'text'}, }), ('name', ('str',", "{ 'doc': 'The URL that documents the ATT&CK tactic.', }),", "('guid', {}), { 'doc': 'An instance of a host creating", "{ 'doc': 'The source address of flow that caused the", "a specific antivirus signature.', }), ('it:av:prochit', ('guid', {}), { 'doc':", "{}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The process which", "of the host or system.', }), ('desc', ('str', {}), {", "version parts out of the string. 
Returns: int, dict: The", "('str', {}), { 'doc': 'Semver build string.', }), ('url', ('inet:url',", "('guid', {}), { 'doc': 'A YARA rule unique identifier.', }),", "'doc': 'Organization which authored the software.', }), ('author:acct', ('inet:web:acct', {}),", "the given Android intent.'}), ('it:prod:softver', ('guid', {}), { 'doc': 'A", "'The function rank score used to evaluate if it exhibits", "that deleted data from the registry.', }), ('host', ('it:host', {}),", "status of this ATT&CK technique.', }), ('isnow', ('it:mitre:attack:technique', {}), {", "True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc': 'A vulnerability as designated by", "{'fields': ( ('soft', 'it:prod:softver'), ('file', 'file:bytes'))}), { 'doc': 'A file", "account.', 'ex': '1001', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The", "element status.', 'ex': 'current', }), ('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}), {", "information about the signature.', }) )), ('it:av:filehit', {}, ( ('file',", "that contains the function.'}), ('va', ('int', {}), { 'doc': 'The", "hit.', }), ('sig', ('it:av:sig', {}), { 'doc': 'The signature that", "{}), { 'doc': 'The Microsoft Windows Security Identifier of the", "software version that contains the library.'}), ('lib', ('it:prod:softver', {}), {'ro':", "case normalized version of the it:dev:str.', }), )), ('it:sec:cve', {},", "{'type': 'it:sec:cwe', 'uniq': True, 'sorted': True, 'split': ','}), { 'doc':", "of a host requesting a URL.', }), ('it:exec:bind', ('guid', {}),", "map was created in the process.', }), ('size', ('int', {}),", "}), ), 'forms': ( ('it:hostname', {}, ()), ('it:host', {}, (", "'doc': 'The host running the process that deleted the file.", "{ 'doc': 'An instance of a password hash.', }), ('it:exec:proc',", "path for the file.', }), ('path:dir', ('file:path', {}), { 'ro':", "{}), { 'doc': 'The SHA256 password hash value.', }), ('hash:sha512',", "mmap is mapped with write permissions.', }), 
('perms:execute', ('bool', {}),", "('it:exec:proc', {}), { 'doc': 'The host process which caused the", "with this account.', }), ('host', ('it:host', {}), { 'doc': 'The", "port.', }), ('it:fs:file', ('guid', {}), { 'doc': 'A file on", "address of the sofware author.', }), ('author:person', ('ps:person', {}), {", "main process executing code that created the new file.', }),", "{ 'doc': 'The source IPv6 address of the flow that", "for the ATT&CK group.', }), ('names', ('array', {'type': 'ou:name', 'uniq':", "= node.get('vers') if not prop: return await node.set('vers:norm', prop) #", "the version string.', }), ('arch', ('it:dev:str', {}), { 'doc': 'Software", "'cno.mitre.g0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc':", "{ 'doc': 'Timestamp for when this version of the software", "(50, 'err'), (60, 'crit'), (70, 'alert'), (80, 'emerg'), ) class", "if c == ':': parts.append(part) part = '' continue part", "'doc': 'The name of the host or system.', }), ('desc',", "{ 'doc': 'URL relevant for the software.', }), ('isos', ('bool',", "}), ('contact', ('ps:contact', {}), { 'doc': 'Additional contact information associated", "await node.snap.addNode('it:dev:str', prop) async def _onPropSoftverVers(self, node, oldv): # Set", "rule unique identifier.', }), ('it:app:yara:match', ('comp', {'fields': (('rule', 'it:app:yara:rule'), ('file',", "minor, patch) return valu loglevels = ( (10, 'debug'), (20,", "name of the snort rule.'}), ('version', ('it:semver', {}), { 'doc':", "a mapped view of a file.', }), ('hash:sha256', ('hash:sha256', {}),", "'The \"vendor\" field from the CPE 2.3 string.'}), ('product', ('str',", "{}), { 'doc': 'The YARA rule text.', 'disp': {'hint': 'text'},", "}), ('it:exec:proc', ('guid', {}), { 'doc': 'A process executing on", "nodes: name = nodes[0].get('name') if name: await node.set('software:name', name) async", "{ 'ro': True, 'doc': 'The file that contains the function.'}),", "('it:dev:str', {}), { 'doc': 'Version 
string associated with this version", "'doc': 'The host running the process that wrote to the", "version of a software product which is present on a", "document the ATT&CK technique.', }), ('parent', ('it:mitre:attack:technique', {}), { 'doc':", "('sig', 'it:av:sig'))}), { 'doc': 'A file that triggered an alert", "'doc': 'The \"edition\" field from the CPE 2.3 string.'}), ('language',", "('text', ('str', {}), { 'doc': 'The YARA rule text.', 'disp':", "an ATT&CK group to a synapse ou:org.', }), ('name', ('ou:name',", "('hash:ntlm', {}), { 'doc': 'The NTLM password hash value.', }),", "for the ATT&CK software.', }), ('names', ('array', {'type': 'str', 'uniq':", "('vers', ('it:dev:str', {}), { 'doc': 'Version string associated with this", "True, 'strip': True}), { 'ro': True, 'doc': 'The file extension", "('inet:url', {}), { 'doc': 'A URL linking this CWE to", "{ 'doc': 'The authentication domain that the host is a", "{}, ( ('mesg', ('str', {}), { 'doc': 'The log messsage", "for the ATT&CK group.', }), ('desc', ('str', {}), { 'doc':", "loaded from.', }), ('file', ('file:bytes', {}), { 'doc': 'The library", "}), )), ('it:exec:file:write', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "a mutex.', }), ('it:dev:int', ('int', {}), { 'doc': 'A developer", "code that created the mutex.', }), ('host', ('it:host', {}), {", "array of groups that the account is a member of.',", "that operates the given domain.', }), )), ('it:network', {}, (", "subs dictionary. 
''' try: valu, info = self.core.model.type('it:semver').norm(valu) subs =", "this software version.', }), ('vers', ('it:dev:str', {}), { 'doc': 'Version", "{ 'doc': 'The credentials that were used for the logon.',", "of the rule.'}), )), ('it:app:snort:hit', {}, ( ('rule', ('it:app:snort:rule', {}),", "array of techniques used by the software.', }), )), ('it:mitre:attack:mitigation',", "IDs used by the group.', }), ('software', ('array', {'type': 'it:mitre:attack:software',", "{}, ( ('rule', ('it:app:yara:rule', {}), { 'ro': True, 'doc': 'The", "included in this ATT&CK software.', 'ex': 'cno.mitre.s0100', }), ('references', ('array',", "the ATT&CK tactic.', }), )), ('it:mitre:attack:technique', {}, ( ('name', ('str',", "is present on a given host.', }), ('it:av:sig', ('comp', {'fields':", "True}), { 'doc': 'The name of the domain.', }), ('desc',", "{ 'doc': 'The host running the process that created the", "named pipe created by a process at runtime.', }), ('it:exec:url',", "value that was deleted.', }), )), ('it:app:snort:rule', {}, ( ('text',", "('src:thread', ('it:exec:thread', {}), { 'doc': 'The thread which created this", "library software version.'}), )), ('it:prod:softfile', {}, ( ('soft', ('it:prod:softver', {}),", "2.3 string specifying this software version', }), ('cves', ('array', {'type':", "ATT&CK software.', }), ('names', ('array', {'type': 'str', 'uniq': True, 'sorted':", "'doc': 'The main process executing code that bound the listening", "cover raise except Exception: logger.exception('Failed to brute force version string", "dictionary. 
''' try: valu, info = self.core.model.type('it:semver').norm(valu) subs = info.get('subs')", "file path (parsed from :path).', }), ('file', ('file:bytes', {}), {", "('time', {}), { 'doc': 'The start time for the process.',", "('server:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 address specified to", "of ATT&CK technique IDs used by the group.', }), ('software',", "def _splitCpe23(self, text): part = '' parts = [] genr", "'doc': 'Name of the software.', }), ('names', ('array', {'type': 'it:dev:str',", "snort rule that matched the file.'}), ('flow', ('inet:flow', {}), {", "'minor': minor, 'patch': patch} return valu, {'subs': subs} def repr(self,", "('success', ('bool', {}), { 'doc': 'Set to false to indicate", "True, 'doc': 'The signature name.', }), ('sig:soft', ('it:prod:soft', {}), {", "that wrote to the registry. Typically the same host referenced", "('time', ('time', {}), { 'doc': 'The time the logon occured.',", "host writing a file to a filesystem.', }), ('it:exec:reg:get', ('guid',", "version of the software is available from.', }), )), ('it:prod:softlib',", "'doc': 'Set to True if the software is an operating", "bound the listening port.', }), ('host', ('it:host', {}), { 'doc':", "the account is registered.', }), ('posix:uid', ('int', {}), { 'doc':", "{}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The process where", "('it:os:android:ibroadcast', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent') )}), {", "the time of match.'}), )), ('it:app:yara:rule', {}, ( ('text', ('str',", "in the axon.', }), )), ('it:exec:mutex', {}, ( ('proc', ('it:exec:proc',", "place where the host resides.', }), ('loc', ('loc', {}), {", "'doc': 'A free-form description of the host.', }), ('domain', ('it:domain',", "thread exited.', }), ('exitcode', ('int', {}), { 'doc': 'The exit", "pre-release version into a integer value Major, minor and patch", "{}), { 'doc': 'The time the library was loaded.', }),", "that wrote to the file. 
Typically the same host referenced", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"product\" field", "used to launch the process, including any command line parameters.',", "representing the semver is the bitwise concatenation of the major,", "alert on a specific antivirus signature.', }), ('it:av:prochit', ('guid', {}),", "anti-virus product which contains the signature.', }), )), ('it:av:prochit', {},", "minor, 'patch': patch} return valu, {'subs': subs} def repr(self, valu):", "'A GUID that represents a host or system.' }), ('it:log:event',", "the file path (parsed from :path).', }), ('path:ext', ('str', {'lower':", "'doc': 'The time the file was created.', }), ('path', ('file:path',", "'The process ID.', }), ('time', ('time', {}), { 'doc': 'The", "}), ('operator', ('ps:contact', {}), { 'doc': 'The operator of the", "YARA evaluation engines.'}), )), ('it:app:yara:match', {}, ( ('rule', ('it:app:yara:rule', {}),", "('desc', ('str', {}), { 'doc': 'The CWE description field.', 'disp':", "{ 'doc': 'The version of the rule at the time", "new file. May or may not be the same :exe", "pipe. 
May or may not be the same :exe specified", "associated with this version instance.', }), ('vers:norm', ('str', {'lower': True}),", "{ 'ro': True, 'doc': 'The parent directory of the file", "('hash:lm', ('hash:lm', {}), { 'doc': 'The LM password hash value.',", "inside an executable.', }), ('it:reveng:filefunc', ('comp', {'fields': (('file', 'file:bytes'), ('function',", "'The time the named pipe was created.', }), ('name', ('it:dev:pipe',", "'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval', ('guid', {}), { 'doc': 'A Windows registry", "{ 'doc': 'The time the logon occured.', }), ('success', ('bool',", "(parsed from :path).', }), ('path:base', ('file:base', {}), { 'ro': True,", "domain.', }), )), ('it:network', {}, ( ('name', ('str', {'lower': True,", "host containing the file.', }), ('path', ('file:path', {}), { 'doc':", "semantic version number.', }), ('semver:major', ('int', {}), { 'doc': 'Version", "this network.', }), )), ('it:account', {}, ( ('user', ('inet:user', {}),", "('it:exec:thread', ('guid', {}), { 'doc': 'A thread executing in a", "__init__(self, modl, name, info, opts): opts['lower'] = True s_types.Str.__init__(self, modl,", "optional contiguous IPv4 address range of this network.', }), ('net6',", "('it:domain', {}), { 'doc': 'The authentication domain where the account", "mutex created by a process at runtime.', }), ('it:exec:pipe', ('guid',", "'The host running the process that created the named pipe.", "'strip': True}), { 'ro': True, 'doc': 'The \"language\" field from", "return parts def _normPyStr(self, valu): if not valu.startswith('cpe:2.3:'): mesg =", "('guid', {}), { 'doc': 'An instance of a process triggering", "that documents the ATT&CK group.', }), ('tag', ('syn:tag', {}), {", "import synapse.lib.module as s_module import synapse.lib.version as s_version logger =", "registry was written to.', }), ('reg', ('it:dev:regval', {}), { 'doc':", "('it:dev:regkey', {}, ()), ('it:dev:regval', 
{}, ( ('key', ('it:dev:regkey', {}), {", "'doc': 'The specific file containing code that bound the listening", "'doc': 'A free-form description of the CVE vulnerability.', 'disp': {'hint':", "'doc': 'A free-form description of the signature.', 'disp': {'hint': 'text'},", "documents the ATT&CK software.', }), ('tag', ('syn:tag', {}), { 'doc':", ":proc, if present.', }), ('time', ('time', {}), { 'doc': 'The", "}), ('va', ('int', {}), { 'doc': 'The base memory address", "referenced in :proc, if present.', }), ('time', ('time', {}), {", "is available from.', }), )), ('it:prod:softlib', {}, ( ('soft', ('it:prod:softver',", "}), ('exitcode', ('int', {}), { 'doc': 'The exit code for", "('time', {}), { 'doc': 'The time the registry was written", "was loaded.', }), ('unloaded', ('time', {}), { 'doc': 'The time", ")), ('it:exec:file:read', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "serial number of the host.', }), ('operator', ('ps:contact', {}), {", "path to the POSIX account's home directory.\", 'ex': '/home/visi', }),", "running the process that created the mutex. Typically the same", "or may not be the same :exe referenced in :proc,", "'doc': 'The name of the domain.', }), ('desc', ('str', {}),", "{ 'ro': True, 'doc': 'The \"edition\" field from the CPE", "'A GUID that represents an account on a host or", "a windows domain.' 
}), ('it:account', ('guid', {}), { 'doc': 'A", "ATT&CK mitigation.', }), ('desc', ('str', {'strip': True}), { 'doc': 'A", "thread which created this thread.', }), )), ('it:exec:loadlib', {}, (", "('url', ('inet:url', {}), { 'doc': 'The URL that documents the", "'The serial number of the host.', }), ('operator', ('ps:contact', {}),", "loaded.', }), ('unloaded', ('time', {}), { 'doc': 'The time the", "('ctime', ('time', {}), { 'doc': 'The file creation time.', }),", "logon originated.', }), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4", "'doc': 'The URL that documents the ATT&CK tactic.', }), ('tag',", "}), ('it:dev:regkey', ('str', {}), { 'doc': 'A Windows registry key.',", "if that information is present. ''' def postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str,", "{}), { 'doc': 'The host running the process that bound", "{}), { 'doc': 'The host running the process that created", "'doc': 'The host that the account logged in to.', }),", "a particular version.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq': True,", "if present.', }), ('exe', ('file:bytes', {}), { 'doc': 'The specific", "('os', ('it:prod:softver', {}), {'ro': True, 'doc': 'The operating system which", "of the it:dev:str.', }), )), ('it:sec:cve', {}, ( ('desc', ('str',", "{ 'doc': 'The parent ATT&CK technique on this sub-technique.', }),", "}), ('desc', ('str', {}), { 'doc': 'A description of the", "('perm', ('it:os:android:perm', {}), {'ro': True, 'doc': 'The android permission requested", "string.'}), ('product', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "Software ID.', 'ex': 'S0154', }), ('it:dev:str', ('str', {}), { 'doc':", "the process that created the mutex. 
Typically the same host", "of the account.', 'ex': '1001', }), ('posix:gid', ('int', {}), {", "process at runtime.', }), ('it:exec:url', ('guid', {}), { 'doc': 'An", "('url', ('inet:url', {}), { 'doc': 'A reference URL for information", "contact information of the org or person who authored the", "{}), { 'doc': 'Version patch number.', }), ('semver:pre', ('str', {}),", "time the process exited.', }), ('exitcode', ('int', {}), { 'doc':", "('ps:contact', {}), { 'doc': 'The contact information of the org", "associated with this version instance.', }), ('software:name', ('str', {'lower': True,", "'The sensor host node that produced the hit.'}), ('version', ('it:semver',", "'strip': True}), { 'ro': True, 'doc': 'The \"edition\" field from", "'sorted': True, 'split': ','}), { 'doc': 'An array of ChildOf", "('exitcode', ('int', {}), { 'doc': 'The exit code for the", "pipe. Typically the same host referenced in :proc, if present.',", "('data', ('data', {}), { 'doc': 'A raw JSON record of", "'The primary name for the ATT&CK tactic.', }), ('desc', ('str',", "}), )), ('it:domain', {}, ( ('name', ('str', {'lower': True, 'strip':", "during the URL retrieval..' }), )), ('it:exec:bind', {}, ( ('proc',", "'warning'), (50, 'err'), (60, 'crit'), (70, 'alert'), (80, 'emerg'), )", "'doc': 'An instance of a function in an executable.', }),", "antivirus engine name.' 
}), ('it:av:filehit', ('comp', {'fields': (('file', 'file:bytes'), ('sig',", "{ 'doc': 'The geo-political location string for the node.', }),", "specific version of the software is available from.', }), )),", "key.', }), ('it:exec:reg:del', ('guid', {}), { 'doc': 'An instance of", "'doc': 'The (optional) clear text password for this password hash.',", "'doc': 'The file that was created.', }), )), ('it:exec:file:del', {},", "in.', }), ('creds', ('auth:creds', {}), { 'doc': 'The credentials that", "of the software author.', }), ('author:email', ('inet:email', {}), { 'deprecated':", "('semver:pre', ('str', {}), { 'doc': 'Semver prerelease string.', }), ('semver:build',", "logical network.' }), ('it:domain', ('guid', {}), { 'doc': 'A logical", "pipe.', }), ('host', ('it:host', {}), { 'doc': 'The host running", "code that wrote to / modified the existing file.', }),", "'doc': 'The exit code for the process.', }), ('user', ('inet:user',", "('client:port', ('inet:port', {}), { 'doc': 'The client port during the", "'doc': 'A brief description of the group.', }), ('host', ('it:host',", "map an ATT&CK software to a synapse it:prod:soft.', }), ('name',", "'The file that contains the function.'}), ('va', ('int', {}), {", "host node that produced the hit.'}), ('version', ('it:semver', {}), {", "Returns: int, dict: The system normalized version integer and a", "}), )), ('it:exec:reg:del', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "_splitCpe23(self, text): part = '' parts = [] genr =", "deleted data from the registry. May or may not be", "'doc': 'The host running the process that created the mutex.", "path to the POSIX account's default shell.\", 'ex': '/bin/bash', }),", "process that deleted data from the registry. 
Typically the same", "('it:exec:thread', {}), { 'doc': 'The thread which created this thread.',", "the it:semver normalization before attempting to extract version parts out", "matched the process to the rule.'}), ('version', ('it:semver', {}), {", "{ 'doc': 'Semantic Version type.', }), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, {", "{ 'doc': 'An array of strings referenced within the function.',", "linking this CWE to a full description.', }), ('parents', ('array',", "account.', 'ex': '1001', }), ('posix:gid', ('int', {}), { 'doc': 'The", "( ('key', ('it:dev:regkey', {}), { 'doc': 'The Windows registry key.',", "{}), { 'doc': 'The time the data from the registry", "('time', {}), { 'doc': 'The time the library was unloaded.',", "the activity.'}), ('thread', ('it:exec:thread', {}), { 'doc': 'The host thread", "'The IPv4 address specified to bind().' }), ('server:ipv6', ('inet:ipv6', {}),", "('it:logon', {}, ( ('time', ('time', {}), { 'doc': 'The time", "integer value Major, minor and patch levels are represented as", "node, oldv): # Set vers:norm and make it's normed valu", "used by the group.', }), )), ('it:mitre:attack:tactic', {}, ( ('name',", "listening port. 
May or may not be the same :exe", "as s_exc import synapse.lib.types as s_types import synapse.lib.module as s_module", ")), ('it:exec:loadlib', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "('it:os:android:intent', {}, ()), ('it:os:android:reqperm', {}, ( ('app', ('it:prod:softver', {}), {'ro':", "= s_version.parseSemver(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable", "()), ('it:dev:regval', {}, ( ('key', ('it:dev:regkey', {}), { 'doc': 'The", "('group', ('inet:user', {}), { 'doc': 'The group owner of the", "{'hint': 'text'}, }), ('desc:short', ('str', {'lower': True}), { 'doc': 'A", "array of URLs that document the ATT&CK technique.', }), ('parent',", "'doc': 'The \"product\" field from the CPE 2.3 string.'}), ('version',", "('it:host:activity',), }), ('it:network', ('guid', {}), { 'doc': 'A GUID that", ")), ('it:domain', {}, ( ('name', ('str', {'lower': True, 'strip': True,", "('language', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "file which caused the activity.'}), ('proc', ('it:exec:proc', {}), { 'doc':", "= s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch')) return valu, {'subs': subs} def _normPyInt(self,", "modl, name, info, opts): opts['lower'] = True s_types.Str.__init__(self, modl, name,", "address where the library was loaded in the process.', }),", "array of URLs that document the ATT&CK mitigation.', }), ('addresses',", "semver properly or bruteforce parts try: valu, subs = self.bruteVersionStr(prop)", "\"The path to the POSIX account's home directory.\", 'ex': '/home/visi',", "'doc': 'The user ID of the account.', 'ex': '1001', }),", "'The name of the network.', }), ('desc', ('str', {}), {", "execute permissions.', }), ('created', ('time', {}), { 'doc': 'The time", "'doc': 'The log messsage text.', }), ('severity', ('int', {'enums': loglevels}),", "the ATT&CK mitigation.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "evaluated as a 
match.'}), )), ('it:reveng:function', {}, ( ('name', ('str',", "True}), { 'doc': 'Name of the software.', }), ('names', ('array',", "the registry key, if the value is an integer.', }),", "{}), { 'doc': 'Set to True if the software is", "('it:exec:file:add', ('guid', {}), { 'doc': 'An instance of a host", "'doc': 'The guid matching the function.'}), ('string', ('str', {}), {", "'it:prod:softver'))}), { 'doc': 'A software version contains a library software", "'doc': 'URL relevant for the software.', }), ('isos', ('bool', {}),", "the file was written to/modified.', }), ('path:dir', ('file:path', {}), {", "{ 'doc': 'The source IPv4 address of the flow that", "('serial', ('str', {}), { 'doc': 'The serial number of the", "( ('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The app software", "{'soft': prop}} nodes = await node.snap.nodes('it:prod:soft=$soft', opts=opts) if nodes: name", "memory map in bytes.', }), ('perms:read', ('bool', {}), { 'doc':", "('it:exec:file:del', ('guid', {}), { 'doc': 'An instance of a host", "the android permission.'}), ('it:os:android:ilisten', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent',", "'The time the library was loaded.', }), ('unloaded', ('time', {}),", "{ 'doc': 'The contact information of the org or person", "('proc', ('it:exec:proc', {}), { 'doc': 'The process where the memory", "'split': ','}), { 'doc': 'An array of techniques used by", "s_exc.BadTypeValu: # Try doing version part extraction by noming through", "map an arbitrary pre-release version into a integer value Major,", "('it:av:sig', {}, ( ('soft', ('it:prod:soft', {}), { 'ro': True, 'doc':", "('path', ('file:path', {}), { 'doc': 'The path where the file", "('ipv4', ('inet:ipv4', {}), { 'doc': 'The last known ipv4 address", "(70, 'alert'), (80, 'emerg'), ) class ItModule(s_module.CoreModule): async def initCoreModule(self):", "scope of the function.', }), )), ('it:reveng:funcstr', {}, ( ('function',", "representing a mutex.', }), ('it:dev:int', ('int', 
{}), { 'doc': 'A", "host is a member of.', }), ('ipv4', ('inet:ipv4', {}), {", "to map an ATT&CK software to a synapse it:prod:soft.', }),", "'doc': 'A description of the ATT&CK software.', 'disp': {'hint': 'text'},", "major number.', }), ('semver:minor', ('int', {}), { 'doc': 'Version minor", "read permissions.', }), ('perms:write', ('bool', {}), { 'doc': 'True if", "'A free-form description of the CVE vulnerability.', 'disp': {'hint': 'text'},", "subs: valu = s_version.packVersion(subs.get('major'), subs.get('minor', 0), subs.get('patch', 0)) return valu,", "process.', }), ('loaded', ('time', {}), { 'doc': 'The time the", "{}), { 'doc': 'The URL that documents the ATT&CK tactic.',", "May or may not be the same :exe referenced in", "{}), { 'deprecated': True, 'doc': 'Organization which authored the software.',", "from the CPE 2.3 string.'}), ('sw_edition', ('str', {'lower': True, 'strip':", "of a password hash.', }), ('it:exec:proc', ('guid', {}), { 'doc':", "{}), { 'doc': 'The base memory address where the map", "( ('time', ('time', {}), { 'doc': 'The time the logon", "('complexity', ('int', {}), { 'doc': 'The complexity of the function.'}),", "with execute permissions.', }), ('created', ('time', {}), { 'doc': 'The", "'cve-2012-0158' }), ('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), { 'doc': 'NIST NVD", "r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc': 'A vulnerability as designated by a Common", "'deprecated': True, 'doc': 'Person who authored the software.', }), ('url',", "given host.', }), ('it:av:sig', ('comp', {'fields': (('soft', 'it:prod:soft'), ('name', ('str',", "signature that the file triggered on.' }), ('time', ('time', {}),", "which created the process.' }), ('killedby', ('it:exec:proc', {}), { 'doc':", "annotate nodes included in this ATT&CK tactic.', 'ex': 'cno.mitre.ta0100', }),", "('int', {}), { 'doc': 'Version patch number.', }), ('semver:pre', ('str',", "registry. 
May or may not be the same :exe referenced", "time the file was written to/modified.', }), ('path', ('file:path', {}),", ": product : version : update : edition : language", "{ 'doc': 'The time the thread exited.', }), ('exitcode', ('int',", ")), ('it:mitre:attack:tactic', {}, ( ('name', ('str', {'strip': True}), { 'doc':", "True}), { 'doc': 'The primary name for the ATT&CK mitigation.',", "the hit.'}), ('src:ipv4', ('inet:ipv4', {}), { 'doc': 'The source IPv4", "'doc': 'The time the mutex was created.', }), ('name', ('it:dev:mutex',", "name of the function.'}), ('description', ('str', {}), { 'doc': 'Notes", "{}), { 'doc': 'The (optional) clear text password for this", "'product': parts[4], 'version': parts[5], 'update': parts[6], 'edition': parts[7], 'language': parts[8],", "True, 'doc': 'The file that contains the function.'}), ('va', ('int',", "'doc': 'A Windows registry key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval', ('guid',", "ipv4 address for the host.' 
}), ('latlong', ('geo:latlong', {}), {", "{ 'doc': 'The YARA rule text.', 'disp': {'hint': 'text'}, }),", "'A Windows registry key/value pair.', }), ('it:prod:soft', ('guid', {}), {", "that was written to.', }), )), ('it:exec:reg:del', {}, ( ('proc',", "which created the thread.', }), ('src:thread', ('it:exec:thread', {}), { 'doc':", "{ 'ro': True, 'doc': 'The \"update\" field from the CPE", "'A logical boundary of authentication and configuration such as a", "True, 'doc': 'Host serving a url.', }), ('url', ('inet:url', {}),", "executing code that read the file.', }), ('host', ('it:host', {}),", "'Version patch number.', }), ('semver:pre', ('str', {}), { 'doc': 'Semver", "('guid', {}), { 'doc': 'An instance of a host deleting", "activity occurred.'}), ('time', ('time', {}), { 'doc': 'The time that", "'An instance of a YARA rule match to a process.',", "YARA rule.'}), ('version', ('it:semver', {}), { 'doc': 'The current version", "software requests the android permission.'}), ('it:os:android:ilisten', ('comp', {'fields': ( ('app',", "}), ), 'types': ( ('it:hostname', ('str', {'strip': True, 'lower': True}),", "'The time the memory map was deleted.', }), ('path', ('file:path',", "'A library load event in a process.', }), ('it:exec:mmap', ('guid',", "('it:exec:mutex', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "or value that was read.', }), )), ('it:exec:reg:set', {}, (", "'forms': ( ('it:hostname', {}, ()), ('it:host', {}, ( ('name', ('it:hostname',", "'update': parts[6], 'edition': parts[7], 'language': parts[8], 'sw_edition': parts[9], 'target_sw': parts[10],", "address range of this network.', }), ('net6', ('inet:net6', {}), {", "('time', ('time', {}), { 'doc': 'The time that the AV", "('client:host', ('it:host', {}), { 'doc': 'The host where the logon", "{}), {'ro': True, 'doc': 'The app software which listens for", "{ 'doc': 'The file on the host.', }), ('ctime', ('time',", "mitigation.', }), ('desc', ('str', {'strip': True}), { 'doc': 'A 
description", "('int', {}), { 'doc': 'Version major number.', }), ('semver:minor', ('int',", "}), ('exited', ('time', {}), { 'doc': 'The time the thread", "2.3 string.'}), )), ('it:sec:cwe', {}, ( ('name', ('str', {}), {", "'doc': 'Lower case normalized version of the it:dev:str.', }), )),", "('it:exec:reg:set', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "{ 'doc': 'The NIST CPE 2.3 string specifying this software.',", "instance of a host deleting a file from a filesystem.',", "}), ('server', ('inet:server', {}), { 'doc': 'The inet:addr of the", "('desc', ('str', {'strip': True}), { 'doc': 'A description of the", "('it:dev:regkey', ('str', {}), { 'doc': 'A Windows registry key.', 'ex':", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"update\"", ":path).', }), ('file', ('file:bytes', {}), { 'doc': 'The file on", "the YARA rule.'}), ('time', ('time', {}), { 'doc': 'The time", "This normalizes a version string into an integer to allow", "mutex.', }), ('host', ('it:host', {}), { 'doc': 'The host running", "'doc': 'A Windows registry key/value pair.', }), ('it:prod:soft', ('guid', {}),", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"sw_edition\"", "to a synapse it:prod:soft.', }), ('name', ('str', {'strip': True}), {", "'doc': 'The Windows registry key.', }), ('str', ('it:dev:str', {}), {", "caused the hit.'}), ('dst:port', ('inet:port', {}), { 'doc': 'The destination", "mapped segment located in a process.', }), ('it:cmd', ('str', {'strip':", "listens for the android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro': True,", "inside a function.', }), ('it:reveng:impfunc', ('str', {'lower': 1}), { 'doc':", "used by the software.', }), )), ('it:mitre:attack:mitigation', {}, ( #", ")), ('it:mitre:attack:group', {}, ( ('org', ('ou:org', {}), { 'doc': 'Used", "('it:log:event', ('guid', {}), { 'doc': 'A GUID representing an individual", "host or network.' 
}), ('it:group', ('guid', {}), { 'doc': 'A", "{ 'doc': 'Set to True if the software is an", "of the host.', }), ('model', ('str', {}), { 'doc': 'The", "org that operates the given domain.', }), )), ('it:network', {},", "('comp', {'fields': (('function', 'it:reveng:function'), ('string', 'str'))}), { 'deprecated': True, 'doc':", "system.'}), ('islib', ('bool', {}), { 'doc': 'Set to True if", "{}), { 'doc': 'The file on the host.', }), ('ctime',", "ATT&CK mitigation.', }), ('addresses', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted':", "('author', ('ps:contact', {}), { 'doc': 'Contact info for the author", "for the thread.', }), ('src:proc', ('it:exec:proc', {}), { 'doc': 'An", "'The time the file was deleted.', }), ('path', ('file:path', {}),", "{ 'doc': 'The host running the process that read the", "'doc': 'A Mitre ATT&CK Technique ID.', 'ex': 'T1548', }), ('it:mitre:attack:mitigation',", "('name', ('str', {'lower': True, 'strip': True, 'onespace': True}), { 'doc':", "was written to/modified.', }), ('path', ('file:path', {}), { 'doc': 'The", "node.set('semver', valu) for k, v in subs.items(): await node.set(f'semver:{k}', v)", "data from the registry.', }), ('host', ('it:host', {}), { 'doc':", "signature.', }) )), ('it:av:filehit', {}, ( ('file', ('file:bytes', {}), {", "dict: The system normalized version integer and a subs dictionary.", "'onespace': True}), { 'doc': 'The name of the network.', }),", "whitespace') subs = s_version.parseSemver(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu,", "instance of a host adding a file to a filesystem.',", "\"target_sw\" field from the CPE 2.3 string.'}), ('target_hw', ('str', {'lower':", "executable.', }), ('it:reveng:filefunc', ('comp', {'fields': (('file', 'file:bytes'), ('function', 'it:reveng:function'))}), {", "{ 'doc': 'A developer-selected string.' 
}), ('it:dev:pipe', ('str', {}), {", "('thread', ('it:exec:thread', {}), { 'doc': 'The host thread which caused", "in bytes.', }), ('perms:read', ('bool', {}), { 'doc': 'True if", "{ 'doc': 'The destination port of the flow that caused", "r'^G[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Group ID.', 'ex': 'G0100',", "'doc': 'The file access time.', }), ('user', ('inet:user', {}), {", "('time', ('time', {}), { 'doc': 'The time the mutex was", "{ 'doc': 'The name of the function.'}), ('description', ('str', {}),", "20 bits. The comparable integer value representing the semver is", "('guid', {}), { 'doc': 'A mutex created by a process", "and a subs dictionary. ''' try: valu, info = self.core.model.type('it:semver').norm(valu)", "parts[7], 'language': parts[8], 'sw_edition': parts[9], 'target_sw': parts[10], 'target_hw': parts[11], 'other':", "('file:path', {}), { 'doc': 'The default installation path of the", "'doc': 'A short description of the software.', }), ('cpe', ('it:sec:cpe',", "for an android intent.'}), ('it:os:android:ibroadcast', ('comp', {'fields': ( ('app', 'it:prod:soft'),", "registry key/value pair.', }), ('it:prod:soft', ('guid', {}), { 'doc': 'A", "('it:os:android:perm', {}, ()), ('it:os:android:intent', {}, ()), ('it:os:android:reqperm', {}, ( ('app',", "{ 'doc': 'Used to map an ATT&CK group to a", "named pipe.', }), ('host', ('it:host', {}), { 'doc': 'The host", "a YARA rule match to a process.', }), ('it:app:snort:rule', ('guid',", "file was created.', }), ('path:dir', ('file:path', {}), { 'ro': True,", "{'lower': 1}), { 'doc': 'A function from an imported library.',", "sensor host node that produced the hit.'}), ('version', ('it:semver', {}),", "that represents a group on a host or network.' 
}),", "distributes the file.'}), ('file', ('file:bytes', {}), {'ro': True, 'doc': 'The", "host.', }), ('model', ('str', {}), { 'doc': 'The product model", "'doc': 'The process which killed this process.', }), )), ('it:exec:thread',", "'edition': parts[7], 'language': parts[8], 'sw_edition': parts[9], 'target_sw': parts[10], 'target_hw': parts[11],", "'strip': True}), { 'ro': True, 'doc': 'The \"product\" field from", "'inet:url', 'uniq': True}), { 'doc': 'An array of URLs that", "a semver.') major, minor, patch = s_version.unpackVersion(valu) valu = s_version.packVersion(major,", "author.', }), ('author:email', ('inet:email', {}), { 'deprecated': True, 'doc': 'Email", "{ 'ro': True, 'doc': 'The \"other\" field from the CPE", "{}), { 'doc': 'A description of the ATT&CK tactic.', 'disp':", "be an actual or a virtual / notional host.', }),", "('product', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "of ChildOf CWE Relationships.' }), )), ('it:mitre:attack:group', {}, ( ('org',", "selected integer constant.', }), ('it:dev:regkey', ('str', {}), { 'doc': 'A", "import synapse.exc as s_exc import synapse.lib.types as s_types import synapse.lib.module", "which can run on the operating system.'}), ('os', ('it:prod:softver', {}),", "executing code that deleted data from the registry.', }), ('host',", "True}), { 'doc': 'An array of strings referenced within the", "source IPv4 address of the flow that caused the hit.'}),", "{ 'doc': 'The process where the memory was mapped.', }),", "given Android intent.'}), ('it:prod:softver', ('guid', {}), { 'doc': 'A specific", "a url.', }), ('url', ('inet:url', {}), { 'ro': True, 'doc':", "'doc': 'A NIST CPE 2.3 Formatted String', }), ), 'types':", "filesystem.', }), ('it:exec:file:read', ('guid', {}), { 'doc': 'An instance of", "'it:prod:soft'), ('perm', 'it:os:android:perm'))}), { 'doc': 'The given software requests the", "{ 'doc': 'The given software listens for an android intent.'}),", "norm a negative integer as a 
semver.') if valu >", "caused the hit.'}), ('dst:ipv6', ('inet:ipv6', {}), { 'doc': 'The destination", "name) async def _onPropSoftverArch(self, node, oldv): # make it:dev:str for", "group is registered.', }), ('domain', ('it:domain', {}), { 'doc': 'The", "{ 'doc': 'The path where the file was written to/modified.',", "the axon.', }), )), ('it:exec:mutex', {}, ( ('proc', ('it:exec:proc', {}),", "file path (parsed from :path).', }), ('path:ext', ('str', {'lower': True,", "('it:prod:softver', {}), {'ro': True, 'doc': 'The operating system which the", "within the scope of the function.', }), )), ('it:reveng:funcstr', {},", "patch levels are represented as integers, with a max width", "('creds', ('auth:creds', {}), { 'doc': 'The credentials that were used", "('desc', ('str', {}), { 'doc': 'A description of the ATT&CK", "'doc': 'The registry key or value that was deleted.', }),", "'doc': 'The parent ATT&CK technique on this sub-technique.', }), ('tactics',", "individual log event.', 'interfaces': ('it:host:activity',), }), ('it:network', ('guid', {}), {", "'ex': '/home/visi', }), ('posix:shell', ('file:path', {}), { 'doc': \"The path", "('strings', ('array', {'type': 'it:dev:str', 'uniq': True}), { 'doc': 'An array", "{'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc': 'A vulnerability as designated", "True, 'doc': 'The \"update\" field from the CPE 2.3 string.'}),", "'doc': 'The version of the rule at the time of", "{}, ()), ('it:dev:mutex', {}, ()), ('it:dev:regkey', {}, ()), ('it:dev:regval', {},", "'doc': 'The time of the network flow that caused the", "}), ('path', ('file:path', {}), { 'doc': 'The file path if", "of.', }), )), ('it:group', {}, ( ('name', ('str', {'lower': True,", "'The CWE description field.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "created.', }), ('exited', ('time', {}), { 'doc': 'The time the", "('it:exec:url', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "the file. 
May or may not be the same :exe", "'doc': 'Software architecture.', }), ('released', ('time', {}), { 'doc': 'Timestamp", "'doc': 'An instance of a host getting a registry key.',", "'An instance of a host reading a file from a", "= [] genr = iter(text) try: while True: c =", "subs = s_version.parseSemver(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu, name=self.name,", "async def _onPropSoftverSoft(self, node, oldv): # Check to see if", "'The time that the AV engine detected the signature.' }),", "integer larger than 1152921504606846975 as a semver.') major, minor, patch", "'The executable file which caused the activity.'}), ('proc', ('it:exec:proc', {}),", "{}), { 'doc': 'The authentication domain where the group is", "True})))}), { 'doc': 'A signature name within the namespace of", "the URL. Typically the same host referenced in :proc, if", "'strip': True}), { 'ro': True, 'doc': 'The \"part\" field from", "'A vulnerability as designated by a Common Vulnerabilities and Exposures", "('array', {'type': 'it:group'}), { 'doc': 'An array of groups that", "('str', {'lower': True, 'strip': True}), { 'doc': 'An advertising identification", "was created.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc':", "'The library file that was loaded.', }), )), ('it:exec:mmap', {},", "by a Common Vulnerabilities and Exposures (CVE) number.', 'ex': 'cve-2012-0158'", "''' Provides support for parsing a semantic version string into", "value representing the semver is the bitwise concatenation of the", "logical boundary of authentication and configuration such as a windows", "('intent', 'it:os:android:intent'))}), { 'doc': 'The given software listens for an", "by the software.', }), )), ('it:mitre:attack:mitigation', {}, ( # TODO", "the process.', }), ('exited', ('time', {}), { 'doc': 'The time", "extraction by noming through the string subs = s_version.parseVersionParts(valu) if", "this software version.', }), ('cpe', ('it:sec:cpe', {}), { 'doc': 'The", "= 
s_version.packVersion(subs.get('major'), subs.get('minor', 0), subs.get('patch', 0)) return valu, subs async", "if the mmap is mapped with execute permissions.', }), ('created',", "('it:exec:bind', ('guid', {}), { 'doc': 'An instance of a host", "{}, ( ('text', ('str', {}), { 'doc': 'The snort rule", "('file', 'file:bytes'))}), { 'doc': 'A file is distributed by a", "}), ('it:reveng:funcstr', ('comp', {'fields': (('function', 'it:reveng:function'), ('string', 'str'))}), { 'deprecated':", "that matched the snort rule.'}), ('src', ('inet:addr', {}), { 'doc':", "}), ('sig:name', ('str', {'lower': True}), { 'ro': True, 'doc': 'The", "flow that caused the hit.'}), ('dst', ('inet:addr', {}), { 'doc':", "await node.set('semver', valu) for k, v in subs.items(): await node.set(f'semver:{k}',", "{ 'doc': 'The time of the network flow that caused", "{}), { 'doc': 'The host where the account is registered.',", "process executing code that bound the listening port.', }), ('host',", "('src:ipv4', ('inet:ipv4', {}), { 'doc': 'The source IPv4 address of", "'The authentication domain where the account is registered.', }), ('posix:uid',", "('str', {}), { 'doc': 'The name of the function.'}), ('description',", "('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The software which distributes", "parse string as a semver.') valu = s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch'))", "ATT&CK software.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted':", "a process.', }), ('it:exec:mmap', ('guid', {}), { 'doc': 'A memory", "'A GUID that represents an individual logon/logoff event.' 
}), ('it:hosturl',", "}), )), ('it:exec:file:read', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "True, 'onespace': True}), { 'doc': 'The name of the domain.',", "('size', ('int', {}), { 'doc': 'The size of the memory", "mitigation.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse tag", "('time', ('time', {}), { 'doc': 'The time the port was", "{ 'ro': True, 'doc': 'The \"vendor\" field from the CPE", "deleted data from the registry.', }), ('host', ('it:host', {}), {", "address of the first codeblock of the function.'}), ('rank', ('int',", "executing code that deleted the file.', }), ('host', ('it:host', {}),", "a named pipe.', }), ('it:dev:mutex', ('str', {}), { 'doc': 'A", "{ 'doc': 'A specific version of a software product.'}), ('it:prod:softfile',", "'doc': 'Version string associated with this version instance.', }), ('vers:norm',", "May be an actual or a virtual / notional host.',", "for arch prop = node.get('arch') if prop: await node.snap.addNode('it:dev:str', prop)", "process executing code that read the registry.', }), ('host', ('it:host',", "sofware author.', }), ('author:person', ('ps:person', {}), { 'deprecated': True, 'doc':", "\"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) text, info = s_types.Str._normPyStr(self, valu) parts", "Major, minor and patch levels are represented as integers, with", "binding the port.' }), ('server:ipv4', ('inet:ipv4', {}), { 'doc': 'The", "_normPyInt(self, valu): if valu < 0: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot", "'split': ','}), { 'doc': 'An array of ATT&CK tactics that", "attempt to get version information for. Notes: This first attempts", "True, 'doc': 'Person who authored the software.', }), ('url', ('inet:url',", "True}), { 'ro': True, 'doc': 'The signature name.' }), ('desc',", "'onespace': True}), { 'doc': 'The name of the domain.', }),", "as a windows domain.' 
}), ('it:account', ('guid', {}), { 'doc':", "('it:exec:proc', {}), { 'doc': 'The process which killed this process.',", "normalized semantic version number.', }), ('semver:major', ('int', {}), { 'doc':", "a host adding a file to a filesystem.', }), ('it:exec:file:del',", "s_types.Str._normPyStr(self, valu) parts = self._splitCpe23(text) if len(parts) != 13: mesg", "('time', {}), { 'doc': 'The time the thread exited.', }),", "'doc': 'The main process executing code that wrote to /", "and patch levels are represented as integers, with a max", "or network.' }), ('it:logon', ('guid', {}), { 'doc': 'A GUID", "}), ('ctime', ('time', {}), { 'doc': 'The file creation time.',", "{ 'doc': 'The main process executing code that wrote to", "permissions.', }), ('perms:execute', ('bool', {}), { 'doc': 'True if the", "'sw_edition': parts[9], 'target_sw': parts[10], 'target_hw': parts[11], 'other': parts[12], } return", "operating system of the host.' }), ('manu', ('str', {}), {", "parent ATT&CK technique on this sub-technique.', }), ('tactics', ('array', {'type':", "where the account is registered.', }), ('domain', ('it:domain', {}), {", "the file.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc':", "main process executing code that requested the URL.', }), ('host',", "a host writing a file to a filesystem.', }), ('it:exec:reg:get',", "{ 'ro': True, 'doc': 'The guid matching the function.'}), ('file',", "version : update : edition : language : sw_edition :", "with a max width of 20 bits. 
The comparable integer", "SHA256 password hash value.', }), ('hash:sha512', ('hash:sha512', {}), { 'doc':", "'doc': 'An instance of a host writing a file to", "address where the map was created in the process.', }),", "{'subs': subs} def _normPyInt(self, valu): if valu < 0: raise", "load event in a process.', }), ('it:exec:mmap', ('guid', {}), {", "('it:host', {}), { 'doc': 'The host where the account is", "May or may not be the same :exe specified in", "by a host or system.', }), ('it:sec:cve', ('str', {'lower': True,", "logon attempt.', }), ('logoff:time', ('time', {}), { 'doc': 'The time", "CPE 2.3 string.'}), ('target_hw', ('str', {'lower': True, 'strip': True}), {", "{}), { 'doc': 'The source IPv6 address of the flow", "('it:fs:file', ('guid', {}), { 'doc': 'A file on a host.'", "('it:dev:str', {}), { 'doc': 'Software architecture.', }), ('released', ('time', {}),", "('it:reveng:function', {}), { 'ro': True, 'doc': 'The guid matching the", "filesystem.', }), ('it:exec:file:del', ('guid', {}), { 'doc': 'An instance of", "('dst:port', ('inet:port', {}), { 'doc': 'The destination port of the", "authored the software.', }), ('author:acct', ('inet:web:acct', {}), { 'deprecated': True,", "larger than 1152921504606846975 as a semver.') major, minor, patch =", "{ 'doc': 'The optional contiguous IPv6 address range of this", "{}, ( # TODO map to an eventual risk:mitigation ('name',", "writing a file to a filesystem.', }), ('it:exec:reg:get', ('guid', {}),", "(parsed from :path).', }), ('file', ('file:bytes', {}), { 'doc': 'The", "('tactics', ('array', {'type': 'it:mitre:attack:tactic', 'uniq': True, 'sorted': True, 'split': ','}),", "the scope of the function.', }), ('strings', ('array', {'type': 'it:dev:str',", "this ATT&CK group ID.', 'ex': 'cno.mitre.g0100', }), ('references', ('array', {'type':", "raise s_exc.BadTypeValu(valu=valu, mesg=mesg) text, info = s_types.Str._normPyStr(self, valu) parts =", "'Semver prerelease string.', }), ('semver:build', ('str', {}), { 
'doc': 'Semver", "}), ('it:dev:int', ('int', {}), { 'doc': 'A developer selected integer", "as we cannot map an arbitrary pre-release version into a", "the CVE vulnerability.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "the YARA rule.'}), ('author', ('ps:contact', {}), { 'doc': 'Contact info", "Formatted String', }), ), 'types': ( ('it:hostname', ('str', {'strip': True,", "}), ('it:dev:regval', ('guid', {}), { 'doc': 'A Windows registry key/value", "True, 'doc': 'The signature that the file triggered on.' }),", "of the trigger.'}), ('dst:ipv4', ('inet:ipv4', {}), { 'doc': 'The destination", "parts[9], 'target_sw': parts[10], 'target_hw': parts[11], 'other': parts[12], } return ':'.join(parts),", "part : vendor : product : version : update :", "software broadcasts the given Android intent.'}), ('it:prod:softver', ('guid', {}), {", "('it:account', {}), { 'doc': 'The account that logged in.', }),", "{}), { 'doc': 'A description of the ATT&CK group.', 'disp':", "('int', {}), { 'doc': 'The user ID of the account.',", "'disp': {'hint': 'text'}, }), ('name', ('str', {}), { 'doc': 'The", "ATT&CK group.', }), ('names', ('array', {'type': 'ou:name', 'uniq': True, 'sorted':", "{ 'doc': 'The host process which caused the activity.'}), ('thread',", "()), ('it:exec:proc', {}, ( ('host', ('it:host', {}), { 'doc': 'The", "extract version parts out of the string. Returns: int, dict:", "read the file. 
May or may not be the same", "'str', 'uniq': True, 'sorted': True}), { 'doc': 'Associated names for", "('it:exec:pipe', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "mapped.', }), ('va', ('int', {}), { 'doc': 'The base memory", "('name', ('str', {'lower': True}), { 'ro': True, 'doc': 'The signature", "the signature.', }), ('name', ('str', {'lower': True}), { 'ro': True,", "('str', {}), { 'doc': 'A brief description of the group.',", "{ 'doc': 'The registry key or value that was written", "('it:av:sig', {}), { 'doc': 'The signature that the file triggered", "component parts. This normalizes a version string into an integer", "'The main process executing code that requested the URL.', }),", "in a process.', }), ('it:exec:loadlib', ('guid', {}), { 'doc': 'A", "wrote to the registry.', }), ('host', ('it:host', {}), { 'doc':", "'The log messsage text.', }), ('severity', ('int', {'enums': loglevels}), {", "'split': ','}), { 'doc': 'An array of ATT&CK software IDs", "virtual address of the first codeblock of the function.'}), ('rank',", "'doc': 'The file that was modified.', }), )), ('it:exec:reg:get', {},", "True}), { 'ro': True, 'doc': 'The \"update\" field from the", "}), ('names', ('array', {'type': 'ou:name', 'uniq': True, 'sorted': True}), {", "'An iOS advertising identification string.'}), ('it:os:android:aaid', ('it:adid', {}), { 'doc':", "comparison purposes, as we cannot map an arbitrary pre-release version", "logged in.', }), ('creds', ('auth:creds', {}), { 'doc': 'The credentials", "given software broadcasts the given Android intent.'}), ('it:prod:softver', ('guid', {}),", "True, 'doc': 'The file extension of the file name (parsed", "}), ('author:org', ('ou:org', {}), { 'deprecated': True, 'doc': 'Organization which", "an actual or a virtual / notional host.', }), ('exe',", "'doc': 'The registry key or value that was written to.',", "listening port.', }), ('host', ('it:host', {}), { 'doc': 'The host", "{}), { 'doc': 'The URL that documents the 
ATT&CK group.',", "the mmap is mapped with write permissions.', }), ('perms:execute', ('bool',", "('it:cmd', {}, ()), ('it:exec:proc', {}, ( ('host', ('it:host', {}), {", "created.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc': 'The", "{ 'doc': 'An instance of a host writing a file", "('it:mitre:attack:software', {}, ( ('software', ('it:prod:soft', {}), { 'doc': 'Used to", "CPE 2.3 string.'}), ('sw_edition', ('str', {'lower': True, 'strip': True}), {", "('dst', ('inet:addr', {}), { 'doc': 'The destination address of the", "(('host', 'it:host'), ('url', 'inet:url'))}), { 'doc': 'A url hosted on", "{ 'doc': 'The primary name for the ATT&CK mitigation.', }),", "of 20 bits. The comparable integer value representing the semver", "'The process that matched the YARA rule.'}), ('time', ('time', {}),", "'The android permission requested by the app.'}), )), ('it:prod:softos', {},", "sub-technique.', }), ('tactics', ('array', {'type': 'it:mitre:attack:tactic', 'uniq': True, 'sorted': True,", "this ATT&CK software.', 'ex': 'cno.mitre.s0100', }), ('references', ('array', {'type': 'inet:url',", "[%s]', prop) def getModelDefs(self): modl = { 'ctors': ( ('it:semver',", "('it:prod:softver', {}), {'ro': True, 'doc': 'The software which can run", "'The org that operates the given host.', }), )), ('it:log:event',", "rule match to a file.', }), ('it:app:yara:procmatch', ('guid', {}), {", "{}), { 'ro': True, 'doc': 'The signature that the file", "{}), { 'doc': 'The authentication domain that the host is", "{}), { 'doc': 'The primary group ID of the account.',", "for the ATT&CK software.', }), ('desc', ('str', {'strip': True}), {", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"edition\"", "that matched the file.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The", "('file:bytes', {}), { 'ro': True, 'doc': 'The file that matched", "'Version string associated with this version instance.', }), ('vers:norm', ('str',", "specific file containing code that wrote to the 
file. May", "by the app.'}), )), ('it:prod:softos', {}, ( ('soft', ('it:prod:softver', {}),", "it:dev:str.', }), )), ('it:sec:cve', {}, ( ('desc', ('str', {}), {", "NIST CPE 2.3 Formatted String', }), ), 'types': ( ('it:hostname',", "{'type': 'it:dev:str', 'uniq': True}), { 'doc': 'An array of strings", "'The process which killed this process.', }), )), ('it:exec:thread', {},", "registry key or value that was written to.', }), )),", "True, 'sorted': True}), { 'doc': 'Associated names for the ATT&CK", "valu) parts = self._splitCpe23(text) if len(parts) != 13: mesg =", "next(genr) if c == '\\\\': c += next(genr) if c", "read.', }), ('path', ('file:path', {}), { 'doc': 'The path where", "'doc': 'Set to True if the software is a library.'}),", "('guid', {}), { 'doc': 'A GUID that represents an account", "{'enums': 'current,deprecated,withdrawn'}), { 'doc': 'A Mitre ATT&CK element status.', 'ex':", "containing code that created the mutex. May or may not", "}), ('file', ('file:bytes', {}), { 'doc': 'The file on the", "the file. 
Typically the same host referenced in :proc, if", "string as a semver.') valu = s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch')) return", "used to annotate nodes included in this ATT&CK group ID.',", "time.', }), ('user', ('inet:user', {}), { 'doc': 'The owner of", "the host.', }), ('model', ('str', {}), { 'doc': 'The product", "{ 'doc': 'A version of a software product which is", "('it:exec:mmap', ('guid', {}), { 'doc': 'A memory mapped segment located", "'doc': 'A Mitre ATT&CK Tactic ID.', 'ex': 'TA0040', }), ('it:mitre:attack:technique',", "that was requested.', }), ('client', ('inet:client', {}), { 'doc': 'The", "('it:os:android:reqperm', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('perm', 'it:os:android:perm'))}), { 'doc':", "'The org that owns/operates the network.', }), ('net4', ('inet:net4', {}),", "('net4', ('inet:net4', {}), { 'doc': 'The optional contiguous IPv4 address", "prop: await node.snap.addNode('it:dev:str', prop) async def _onPropSoftverVers(self, node, oldv): #", "'doc': 'Observed/variant names for this software version.', }), ('cpe', ('it:sec:cpe',", ")), ('it:prod:softlib', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc':", "account's default shell.\", 'ex': '/bin/bash', }), ('windows:sid', ('it:os:windows:sid', {}), {", "installation path of the file.'}), )), ('it:hostsoft', {}, ( ('host',", "the android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro': True, 'doc': 'The", "product which contains the signature.', }), )), ('it:av:prochit', {}, (", "( ('text', ('str', {}), { 'doc': 'The snort rule text.',", "}), ('semver:major', ('int', {}), { 'doc': 'Version major number.', }),", "'it:reveng:filefunc'}), { 'doc': 'Other function calls within the scope of", "()), ('it:os:android:perm', {}, ()), ('it:os:android:intent', {}, ()), ('it:os:android:reqperm', {}, (", "'The (optional) hex encoded salt value used to calculate the", "was written to.', }), )), ('it:exec:reg:del', {}, ( ('proc', 
('it:exec:proc',", "'Used to map an ATT&CK software to a synapse it:prod:soft.',", "documents the ATT&CK technique.', }), ('tag', ('syn:tag', {}), { 'doc':", "} return ':'.join(parts), {'subs': subs} class SemVer(s_types.Int): ''' Provides support", "{}), { 'doc': 'The name of the snort rule.'}), ('version',", "('funccalls', ('array', {'type': 'it:reveng:filefunc'}), { 'doc': 'Other function calls within", "{ 'doc': 'The operator of the host.', }), ('org', ('ou:org',", "'it:app:yara:rule'), ('file', 'file:bytes'))}), { 'doc': 'A YARA rule match to", "{ 'doc': 'An array of ChildOf CWE Relationships.' }), )),", "'An instance of a host creating or setting a registry", "mesg=mesg) text, info = s_types.Str._normPyStr(self, valu) parts = self._splitCpe23(text) if", "('intent', 'it:os:android:intent') )}), { 'doc': 'The given software broadcasts the", "we cannot map an arbitrary pre-release version into a integer", "'doc': 'A brief description of the domain.', }), ('org', ('ou:org',", "documents the ATT&CK tactic.', }), ('tag', ('syn:tag', {}), { 'doc':", "'/bin/bash', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The Microsoft Windows", "{ 'doc': 'The file considered the \"main\" executable for the", "that document the ATT&CK tactic.', }), )), ('it:mitre:attack:technique', {}, (", "patch levels. 
Prerelease and build information will be parsed out", "'The username associated with the account', }), ('contact', ('ps:contact', {}),", "in this ATT&CK software.', 'ex': 'cno.mitre.s0100', }), ('references', ('array', {'type':", "with the given os software version.'}), ('it:hostsoft', ('comp', {'fields': (('host',", "before attempting to extract version parts out of the string.", "'The named pipe string.', }), )), ('it:exec:url', {}, ( ('proc',", "{}, ( ('function', ('it:reveng:function', {}), { 'ro': True, 'doc': 'The", "that owns/operates the network.', }), ('net4', ('inet:net4', {}), { 'doc':", "'CWE-120', }), ('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}), { 'doc': 'A Mitre", "instance of a host writing a file to a filesystem.',", "( ('it:hostname', {}, ()), ('it:host', {}, ( ('name', ('it:hostname', {}),", "'doc': 'The URL that was requested.', }), ('client', ('inet:client', {}),", "('hash:sha256', ('hash:sha256', {}), { 'doc': 'A SHA256 hash of the", "'doc': 'The android intent which is broadcast by the app.'}),", "URLs that document the ATT&CK software.', }), ('techniques', ('array', {'type':", "{ 'doc': 'The registry key or value that was deleted.',", "'The software which can run on the operating system.'}), ('os',", "the flow that caused the hit.'}), ('src:port', ('inet:port', {}), {", "('time', {}), { 'doc': 'Timestamp for when this version of", "the ATT&CK group.', }), ('names', ('array', {'type': 'ou:name', 'uniq': True,", "signature that the file triggered on.' 
}), ('sig:name', ('str', {'lower':", "'Groups that are a member of this group.', }), ('posix:gid',", "virtual (e.g., malware sandbox) host.', }), ('it:exec:thread', ('guid', {}), {", "('str', {'regex': r'^CWE-[0-9]{1,8}$'}), { 'doc': 'NIST NVD Common Weaknesses Enumeration", "('sw_edition', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "ID.', }), ('time', ('time', {}), { 'doc': 'The start time", "{}), { 'doc': 'The file creation time.', }), ('mtime', ('time',", "('it:semver', 'synapse.models.infotech.SemVer', {}, { 'doc': 'Semantic Version type.', }), ('it:sec:cpe',", "the process, including any command line parameters.', 'disp': {'hint': 'text'},", "current version of the rule.'}), ('enabled', ('bool', {}), { 'doc':", "within the function.', }), )), ('it:reveng:filefunc', {}, ( ('function', ('it:reveng:function',", "read.', }), )), ('it:exec:file:write', {}, ( ('proc', ('it:exec:proc', {}), {", "'doc': 'The sensor host node that produced the hit.'}), ('version',", "'doc': 'The file that was deleted.', }), )), ('it:exec:file:read', {},", "}), ('arch', ('it:dev:str', {}), { 'doc': 'Software architecture.', }), ('released',", "{len(parts)} parts, expected 13.' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) subs = {", "'doc': 'Used to map an ATT&CK software to a synapse", "'The host running the process that wrote to the file.", "}), )), ('it:fs:file', {}, ( ('host', ('it:host', {}), { 'doc':", "}), )), ('it:mitre:attack:tactic', {}, ( ('name', ('str', {'strip': True}), {", "}), ('semver:build', ('str', {}), { 'doc': 'Semver build string.', }),", "= {'major': major, 'minor': minor, 'patch': patch} return valu, {'subs':", "running the process that bound the listening port. Typically the", "integer to allow version ordering. 
Prerelease information is disregarded for", "('int', {}), { 'doc': 'The size of the memory map", "function.'}), ('impcalls', ('array', {'type': 'it:reveng:impfunc'}), { 'doc': 'Calls to imported", "array of URLs that document the ATT&CK software.', }), ('techniques',", "'doc': 'The SHA256 password hash value.', }), ('hash:sha512', ('hash:sha512', {}),", "{ 'doc': 'The host running the process that deleted data", "the YARA engine matched the process to the rule.'}), ('version',", "be the same :exe specified in :proc, if present.', }),", "'doc': 'The name of the software at a particular version.',", "s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable to parse string as a semver.') valu", "contains the signature.', }), ('name', ('str', {'lower': True}), { 'ro':", "of the rule at the time of match.'}), )), ('it:app:yara:rule',", "Exposures (CVE) number.', 'ex': 'cve-2012-0158' }), ('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}),", "intent.'}), ('intent', ('it:os:android:intent', {}), {'ro': True, 'doc': 'The android intent", "of the major, minor and patch levels. Prerelease and build", "('impcalls', ('array', {'type': 'it:reveng:impfunc'}), { 'doc': 'Calls to imported library", "deleted.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc': 'The", "{ 'doc': 'The time the library was loaded.', }), ('unloaded',", "documents the ATT&CK group.', }), ('tag', ('syn:tag', {}), { 'doc':", "the software.', }), ('author:org', ('ou:org', {}), { 'deprecated': True, 'doc':", "{ 'doc': 'The SHA256 password hash value.', }), ('hash:sha512', ('hash:sha512',", "'An array of techniques used by the software.', }), )),", "version of the rule at the time of match.'}), )),", "str await node.snap.addNode('it:dev:str', prop) # form the semver properly or", "containing code that deleted data from the registry. 
May or", "listening port.', }), ('it:fs:file', ('guid', {}), { 'doc': 'A file", "}), ('parents', ('array', {'type': 'it:sec:cwe', 'uniq': True, 'sorted': True, 'split':", "{}), { 'doc': 'A Windows registry key/value pair.', }), ('it:prod:soft',", "{}), { 'doc': 'The version of the rule at the", "('it:prod:softlib', {}, ( ('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "'doc': 'The \"update\" field from the CPE 2.3 string.'}), ('edition',", "('str', {}), { 'doc': 'A string representing a mutex.', }),", "Relationships.' }), )), ('it:mitre:attack:group', {}, ( ('org', ('ou:org', {}), {", "'doc': 'The URL that documents the ATT&CK mitigation.', }), ('tag',", "{}), { 'doc': 'The source address of flow that caused", "}), ('time', ('time', {}), { 'doc': 'The start time for", "'doc': 'Other function calls within the scope of the function.',", "('manu', ('str', {}), { 'doc': 'The manufacturer of the host.',", "of a string. Args: valu (str): String to attempt to", "force the version out of a string. Args: valu (str):", "'doc': 'A raw JSON record of the log event.', }),", "}), ('semver:minor', ('int', {}), { 'doc': 'Version minor number.', }),", "{ 'doc': 'The Windows registry key.', }), ('str', ('it:dev:str', {}),", "member of.', }), ('ipv4', ('inet:ipv4', {}), { 'doc': 'The last", ")), ('it:exec:mutex', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "'ex': 'foo.exe --dostuff bar', }), ('it:exec:mutex', ('guid', {}), { 'doc':", "('it:exec:proc', {}), { 'doc': 'The process which created the process.'", "for this password hash.', }), )), ('it:cmd', {}, ()), ('it:exec:proc',", "{'strip': True}), { 'doc': 'The primary name for the ATT&CK", "binary data.', }), )), ('it:prod:soft', {}, ( ('name', ('str', {'lower':", "engine matched the process to the rule.'}), ('version', ('it:semver', {}),", "strings if that information is present. 
''' def postTypeInit(self): s_types.Int.postTypeInit(self)", "}), ('os', ('it:prod:softver', {}), { 'doc': 'The operating system of", "is mapped with read permissions.', }), ('perms:write', ('bool', {}), {", "snort rule.'}), ('src', ('inet:addr', {}), { 'doc': 'The source address", "code that created the named pipe.', }), ('host', ('it:host', {}),", "('url', 'inet:url'))}), { 'doc': 'A url hosted on or served", "of techniques used by the software.', }), )), ('it:mitre:attack:mitigation', {},", "the software is an operating system.'}), ('islib', ('bool', {}), {", "'The primary group ID of the account.', 'ex': '1001', }),", "'ex': '1001', }), ('posix:gid', ('int', {}), { 'doc': 'The primary", "{}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The android", "string.', 'ex': 'foo.exe --dostuff bar', }), ('it:exec:mutex', ('guid', {}), {", "allow version ordering. Prerelease information is disregarded for integer comparison", "'An advertising identification string.'}), ('it:os:windows:sid', ('str', {'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc':", "f'CPE 2.3 string has {len(parts)} parts, expected 13.' 
raise s_exc.BadTypeValu(valu=valu,", "('inet:net6', {}), { 'doc': 'The optional contiguous IPv6 address range", "('text', ('str', {}), { 'doc': 'The snort rule text.', 'disp':", "raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable to parse string as a semver.')", "'doc': 'The source IPv4 address of the flow that caused", "( ('it:semver', 'synapse.models.infotech.SemVer', {}, { 'doc': 'Semantic Version type.', }),", "hash.', }), ('hash:md5', ('hash:md5', {}), { 'doc': 'The MD5 password", ")), ('it:exec:reg:set', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "('comp', {'fields': (('soft', 'it:prod:soft'), ('name', ('str', {'lower': True})))}), { 'doc':", "('file:bytes', {}), { 'ro': True, 'doc': 'The file that contains", "'The exit code for the process.', }), ('user', ('inet:user', {}),", "main process executing code that deleted data from the registry.',", "('it:exec:proc', {}), { 'doc': 'The main process executing code that", "which the software can run on.'}), )), ('it:os:android:ilisten', {}, (", "'The geo-political location string for the node.', }), ('os', ('it:prod:softver',", "}), )), ('it:exec:loadlib', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "url hosted on or served by a host or system.',", "version integer and a subs dictionary. 
''' try: valu, info", "True, 'doc': 'The \"target_hw\" field from the CPE 2.3 string.'}),", "('it:app:snort:rule', {}), { 'doc': 'The snort rule that matched the", "{}), { 'doc': 'Software architecture.', }), ('released', ('time', {}), {", "destination IPv4 address of the flow that caused the hit.'}),", "'strip': True}), { 'doc': 'Name of the software.', }), ('names',", "{ 'doc': 'The name of the domain.', }), ('desc', ('str',", "}), ('time', ('time', {}), { 'doc': 'The time the named", "name of the domain.', }), ('desc', ('str', {}), { 'doc':", "{}), { 'doc': 'The default installation path of the file.'}),", "('rule', ('it:app:snort:rule', {}), { 'doc': 'The snort rule that matched", "'doc': 'The authentication domain where the group is registered.', }),", "{}), {'ro': True, 'doc': 'The android permission requested by the", "node): pprop = node.ndef[1] await node.snap.addNode('it:dev:str', pprop) async def _onPropSoftverSoft(self,", "('inet:url', {}), { 'ro': True, 'doc': 'URL available on the", ":proc, if present.'}), ('time', ('time', {}), { 'doc': 'The time", "{ 'ro': True, 'doc': 'The final component of the file", "software which can run on the operating system.'}), ('os', ('it:prod:softver',", "strings using the it:semver normalization before attempting to extract version", "patch} return valu, {'subs': subs} def repr(self, valu): major, minor,", "the ATT&CK mitigation.', }), ('addresses', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True,", "network.', }), )), ('it:account', {}, ( ('user', ('inet:user', {}), {", "('sig:soft', ('it:prod:soft', {}), { 'ro': True, 'doc': 'The anti-virus product", "s_version.packVersion(subs.get('major'), subs.get('minor', 0), subs.get('patch', 0)) return valu, subs async def", "{'strip': True}), { 'doc': 'A description of the ATT&CK mitigation.',", "the major, minor and patch levels. 
Prerelease and build information", "'doc': 'A YARA rule unique identifier.', }), ('it:app:yara:match', ('comp', {'fields':", "{}), { 'doc': 'A thread executing in a process.', }),", "}), ('hash:sha256', ('hash:sha256', {}), { 'doc': 'A SHA256 hash of", "to annotate nodes included in this ATT&CK tactic.', 'ex': 'cno.mitre.ta0100',", "in :proc, if present.'}), ('time', ('time', {}), { 'doc': 'The", "'doc': 'The process where the memory was mapped.', }), ('va',", "{}), { 'doc': 'The status of this ATT&CK technique.', }),", "{}), { 'doc': 'The host running the process that requested", "was created in the process.', }), ('size', ('int', {}), {", "if the mmap is a mapped view of a file.',", "'it:prod:softver'))}), { 'doc': 'A version of a software product which", "it if possible prop = node.get('software') if prop: opts =", "{ 'doc': 'The org that operates the given host.', }),", "of the rule evaluated as a match.'}), )), ('it:app:yara:procmatch', {},", "{'regex': r'^CWE-[0-9]{1,8}$'}), { 'doc': 'NIST NVD Common Weaknesses Enumeration Specification',", "valu prop = node.get('vers') if not prop: return await node.set('vers:norm',", "string subs = s_version.parseVersionParts(valu) if subs is None: raise s_exc.BadTypeValu(valu=valu,", "'The product model of the host.', }), ('serial', ('str', {}),", "True, 'doc': 'The \"other\" field from the CPE 2.3 string.'}),", "{}), { 'doc': 'The operating system of the host.' }),", "mesg='Cannot norm a integer larger than 1152921504606846975 as a semver.')", "the process that read the registry. 
Typically the same host", "}), ('it:dev:mutex', ('str', {}), { 'doc': 'A string representing a", "{'lower': True, 'strip': True, 'onespace': True}), { 'doc': 'The name", "IPv6 where the logon originated.', }), )), ('it:hosturl', {}, (", "('hash:sha1', ('hash:sha1', {}), { 'doc': 'The SHA1 password hash value.',", "value.', }), ('hash:sha512', ('hash:sha512', {}), { 'doc': 'The SHA512 password", "('str', {}), { 'doc': 'A brief description of the network.',", "{ 'doc': 'The NIST CPE 2.3 string specifying this software", "'doc': 'The Microsoft Windows Security Identifier of the account.', }),", "{}), { 'doc': 'The source port of the flow that", "- = N/A ''' def __init__(self, modl, name, info, opts):", "running the process that read the file. Typically the same", "('it:av:filehit', {}, ( ('file', ('file:bytes', {}), { 'ro': True, 'doc':", "'doc': 'The given software listens for an android intent.'}), ('it:os:android:ibroadcast',", "('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 where the logon", "URL retrieval..' }), ('client:port', ('inet:port', {}), { 'doc': 'The client", "logger.exception('Failed to brute force version string [%s]', prop) def getModelDefs(self):", "the process. For example, rundll32.exe may be considered the \"main\"", "'foo.exe --dostuff bar', }), ('it:exec:mutex', ('guid', {}), { 'doc': 'A", "'doc': 'The time that the YARA engine matched the process", "'doc': 'The status of this ATT&CK technique.', }), ('isnow', ('it:mitre:attack:technique',", "primary name for the ATT&CK software.', }), ('names', ('array', {'type':", "a file.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc': 'A SHA256", "time.', }), ('mtime', ('time', {}), { 'doc': 'The file modification", "('comp', {'fields': ( ('soft', 'it:prod:softver'), ('lib', 'it:prod:softver'))}), { 'doc': 'A", "parts, expected 13.' 
raise s_exc.BadTypeValu(valu=valu, mesg=mesg) subs = { 'part':", "to a string inside a function.', }), ('it:reveng:impfunc', ('str', {'lower':", "'The synapse tag used to annotate nodes included in this", "('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The", "ATT&CK tactic.', 'ex': 'cno.mitre.ta0100', }), ('references', ('array', {'type': 'inet:url', 'uniq':", "{'ro': True, 'doc': 'The file distributed by the software.'}), ('path',", "'doc': 'A developer-selected string.' }), ('it:dev:pipe', ('str', {}), { 'doc':", "(str): String to attempt to get version information for. Notes:", "{}), { 'doc': 'The operator of the host.', }), ('org',", ")), ('it:sec:cve', {}, ( ('desc', ('str', {}), { 'doc': 'A", "'The host where the logon originated.', }), ('client:ipv4', ('inet:ipv4', {}),", "host adding a file to a filesystem.', }), ('it:exec:file:del', ('guid',", "'doc': 'The IPv4 of the client during the URL retrieval..'", "host that executed the process. May be an actual or", "async def _onFormItDevStr(self, node): await node.set('norm', node.ndef[1]) async def _onFormMakeDevStr(self,", "on the host.'}) )), ('it:av:sig', {}, ( ('soft', ('it:prod:soft', {}),", "the software.', }), ('url', ('inet:url', {}), { 'doc': 'URL relevant", "known location for the host.' }), ('place', ('geo:place', {}), {", "CPE 2.3 Formatted String', }), ), 'types': ( ('it:hostname', ('str',", "actual (e.g., endpoint) or virtual (e.g., malware sandbox) host.', }),", "{ 'doc': 'The host that the account logged in to.',", "}), ('groups', ('array', {'type': 'it:group'}), { 'doc': 'An array of", "of this group.', }), ('posix:gid', ('int', {}), { 'doc': 'The", "specific file containing code that bound the listening port. May", "(Section 6.2) cpe:2.3: part : vendor : product : version", "of the host.', }), ('serial', ('str', {}), { 'doc': 'The", "('inet:net4', {}), { 'doc': 'The optional contiguous IPv4 address range", "system normalized version integer and a subs dictionary. 
''' try:", "('url', ('inet:url', {}), { 'doc': 'A URL linking this CVE", "of the network.', }), ('org', ('ou:org', {}), { 'doc': 'The", "the flow that caused the hit.'}), ('src:ipv6', ('inet:ipv6', {}), {", "await node.snap.addNode('it:dev:str', prop) # form the semver properly or bruteforce", "String to attempt to get version information for. Notes: This", "an ATT&CK software to a synapse it:prod:soft.', }), ('name', ('str',", "('it:sec:cve', {}, ( ('desc', ('str', {}), { 'doc': 'A free-form", "'The IPv6 where the logon originated.', }), )), ('it:hosturl', {},", "for the ATT&CK mitigation.', }), ('desc', ('str', {'strip': True}), {", "a registry key.', }), ('it:exec:reg:set', ('guid', {}), { 'doc': 'An", "of CVEs that apply to this software version.', }), ('vers',", "a match.'}), )), ('it:app:yara:procmatch', {}, ( ('rule', ('it:app:yara:rule', {}), {", "('path', ('file:path', {}), { 'doc': 'The path to the executable", "}), )), ('it:mitre:attack:mitigation', {}, ( # TODO map to an", "'The IPv6 address specified to bind().' }), ('server:port', ('inet:port', {}),", "'The path where the file was deleted.', }), ('path:dir', ('file:path',", "}), ('src:proc', ('it:exec:proc', {}), { 'doc': 'An external process which", "about the signature.', }) )), ('it:av:filehit', {}, ( ('file', ('file:bytes',", ")), ('it:prod:softver', {}, ( ('software', ('it:prod:soft', {}), { 'doc': 'Software", "available from.', }), )), ('it:prod:softlib', {}, ( ('soft', ('it:prod:softver', {}),", "value is an integer.', }), ('bytes', ('file:bytes', {}), { 'doc':", "'doc': 'The android app which requests the permission.'}), ('perm', ('it:os:android:perm',", "{ 'doc': 'The IPv6 address specified to bind().' }), ('server:port',", "'The host running the process that read the file. 
Typically", "rule unique identifier.', }), ('it:app:snort:hit', ('guid', {}), { 'doc': 'An", "}), ('url', ('inet:url', {}), { 'doc': 'The URL that documents", "('inet:url', {}), { 'doc': 'The URL that documents the ATT&CK", "node.snap.nodes('it:prod:soft=$soft', opts=opts) if nodes: name = nodes[0].get('name') if name: await", "('inet:server', {}), { 'doc': 'The inet:addr of the server when", "{ 'doc': 'The file path if the mmap is a", "to extract version parts out of the string. Returns: int,", "of the registry key, if the value is an integer.',", "caused the activity.'}), ('thread', ('it:exec:thread', {}), { 'doc': 'The host", "('it:os:android:aaid', ('it:adid', {}), { 'doc': 'An android advertising identification string.'}),", "file.', }), )), ('it:exec:file:add', {}, ( ('proc', ('it:exec:proc', {}), {", "'The Microsoft Windows Security Identifier of the account.', }), ('groups',", "}), ('it:exec:loadlib', ('guid', {}), { 'doc': 'A library load event", "= True s_types.Str.__init__(self, modl, name, info, opts) def _splitCpe23(self, text):", "parts[3], 'product': parts[4], 'version': parts[5], 'update': parts[6], 'edition': parts[7], 'language':", ")), ('it:log:event', {}, ( ('mesg', ('str', {}), { 'doc': 'The", "code that requested the URL. 
May or may not be", "is available and set it if possible prop = node.get('software')", "{ 'doc': 'The URL that documents the ATT&CK software.', }),", "main process executing code that read the file.', }), ('host',", "available on the host.', }), )), ('it:dev:str', {}, ( ('norm',", "('it:sec:cve', ('str', {'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc': 'A vulnerability", ": sw_edition : target_sw : target_hw : other * =", "executing code that wrote to / modified the existing file.',", "'ex': 'cno.mitre.t0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), {", "'doc': 'A snort rule unique identifier.', }), ('it:app:snort:hit', ('guid', {}),", "to be compatible with the given os software version.'}), ('it:hostsoft',", "string.', }), ('semver:build', ('str', {}), { 'doc': 'Semver build string.',", "running the process that read the registry. Typically the same", "evaluated as a match.'}), )), ('it:app:yara:procmatch', {}, ( ('rule', ('it:app:yara:rule',", "'The name of the software at a particular version.', }),", "{ 'doc': 'The URL that documents the ATT&CK group.', }),", "('auth:creds', {}), { 'doc': 'The credentials that were used for", "software.', }), ('desc', ('str', {}), { 'doc': 'A description of", "('int', {}), { 'doc': 'The exit code or return value", "{}), { 'doc': 'URL where a specific version of the", "'strip': True, 'onespace': True}), { 'doc': 'The name of the", "the software.'}), ('path', ('file:path', {}), { 'doc': 'The default installation", "('inet:web:acct', {}), { 'deprecated': True, 'doc': 'Web account of the", "{ 'doc': 'An instance of a host creating or setting", "host process which caused the activity.'}), ('thread', ('it:exec:thread', {}), {", "initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) 
self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self,", "'doc': 'An iOS advertising identification string.'}), ('it:os:android:aaid', ('it:adid', {}), {", "'A free-form description of the signature.', 'disp': {'hint': 'text'}, }),", "'The IPv4 of the client during the URL retrieval..' }),", "log event.', }), )), ('it:domain', {}, ( ('name', ('str', {'lower':", "valu) for k, v in subs.items(): await node.set(f'semver:{k}', v) except", "'The time that the YARA engine matched the process to", "main process executing code that read the registry.', }), ('host',", "'The main process executing code that deleted data from the", "{}), { 'doc': 'Contact info for the author of the", "( ('proc', ('it:exec:proc', {}), { 'doc': 'The process where the", "originated.', }), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 where", "('file', ('file:bytes', {}), {'ro': True, 'doc': 'The file distributed by", "','}), { 'doc': 'An array of ATT&CK technique IDs addressed", "{ 'doc': 'The operating system of the host.' }), ('manu',", ")), ('it:group', {}, ( ('name', ('str', {'lower': True, 'strip': True,", "the process. May be an actual or a virtual /", "'doc': 'The org that owns/operates the network.', }), ('net4', ('inet:net4',", "specific antivirus signature.' }), ('it:auth:passwdhash', ('guid', {}), { 'doc': 'An", "specific file containing code that read the file. May or", "'doc': 'The library software version.'}), )), ('it:prod:softfile', {}, ( ('soft',", "'The \"target_hw\" field from the CPE 2.3 string.'}), ('other', ('str',", "major, minor and patch levels. 
Prerelease and build information will", "}), )), ('it:prod:soft', {}, ( ('name', ('str', {'lower': True, 'strip':", "that wrote to the registry.', }), ('host', ('it:host', {}), {", "('it:prod:softver', {}), {'ro': True, 'doc': 'The app software which listens", "version instance.', }), ('vers:norm', ('str', {'lower': True}), { 'doc': 'Normalized", "('int', {}), { 'doc': 'The complexity of the function.'}), ('funccalls',", "'doc': 'The path where the file was created.', }), ('path:dir',", "True, 'doc': 'The \"language\" field from the CPE 2.3 string.'}),", "getModelDefs(self): modl = { 'ctors': ( ('it:semver', 'synapse.models.infotech.SemVer', {}, {", "'The IPv4 where the logon originated.', }), ('client:ipv6', ('inet:ipv6', {}),", "hash.', }), ('it:exec:proc', ('guid', {}), { 'doc': 'A process executing", "of the host.', }), ('operator', ('ps:contact', {}), { 'doc': 'The", "log messsage text.', }), ('severity', ('int', {'enums': loglevels}), { 'doc':", "}), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq': True, 'sorted': True, 'split':", "as a semver.') valu = s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch')) return valu,", "('it:os:android:ibroadcast', {}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The", "'The \"language\" field from the CPE 2.3 string.'}), ('sw_edition', ('str',", "{}), { 'doc': 'The command string used to launch the", "was requested.', }), ('url', ('inet:url', {}), { 'doc': 'The URL", "of the function.'}), ('rank', ('int', {}), { 'doc': 'The function", "represents a logical network.' }), ('it:domain', ('guid', {}), { 'doc':", "{ 'doc': 'The host running the process that bound the", "host.' }), ('manu', ('str', {}), { 'doc': 'The manufacturer of", "('semver:build', ('str', {}), { 'doc': 'Semver build string.', }), ('url',", "triggered on.' }), ('sig:name', ('str', {'lower': True}), { 'ro': True,", "created the mutex. 
Typically the same host referenced in :proc,", "{ 'doc': 'The executable file which caused the activity.'}), ('proc',", "}), ('it:hosturl', ('comp', {'fields': (('host', 'it:host'), ('url', 'inet:url'))}), { 'doc':", "a integer value Major, minor and patch levels are represented", "Mitre ATT&CK element status.', 'ex': 'current', }), ('it:mitre:attack:group', ('str', {'regex':", "'An instance of a host binding a listening port.', }),", "{}), { 'doc': 'An instance of a host requesting a", "_onPropSoftverSoft(self, node, oldv): # Check to see if name is", "{ 'doc': 'The time the memory map was deleted.', }),", "{ 'doc': 'The specific file containing code that read the", "the string subs = s_version.parseVersionParts(valu) if subs is None: raise", "{ 'doc': 'The rule enabled status to be used for", "'The host running the process that created the new file.", "return valu, subs except s_exc.BadTypeValu: # Try doing version part", "encoded salt value used to calculate the password hash.', }),", "'uniq': True}), { 'doc': 'An array of strings referenced within", "'ex': 'T1548', }), ('it:mitre:attack:mitigation', ('str', {'regex': r'^M[0-9]{4}$'}), { 'doc': 'A", "{}, ( ('host', ('it:host', {}), { 'ro': True, 'doc': 'Host", "'The destination address of the trigger.'}), ('dst:ipv4', ('inet:ipv4', {}), {", "of ATT&CK tactics that include this technique.', }), )), ('it:mitre:attack:software',", "'The rule enabled status to be used for YARA evaluation", "'doc': 'An array of URLs that document the ATT&CK group.',", "('it:domain', {}, ( ('name', ('str', {'lower': True, 'strip': True, 'onespace':", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"language\" field", "async def _onFormMakeDevStr(self, node): pprop = node.ndef[1] await node.snap.addNode('it:dev:str', pprop)", "to parse strings using the it:semver normalization before attempting to", "'doc': 'A Microsoft Windows Security Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa',", 
"'The parent ATT&CK technique on this sub-technique.', }), ('tactics', ('array',", "URL linking this CWE to a full description.', }), ('parents',", "{ 'doc': 'An instance of a host requesting a URL.',", "{ 'doc': 'The primary name for the ATT&CK technique.', }),", "GUID that represents a group on a host or network.'", "executable for the process. For example, rundll32.exe may be considered", "process that requested the URL. Typically the same host referenced", "that created the mutex. Typically the same host referenced in", "will be parsed out and available as strings if that", "is a member of.', }), ('ipv4', ('inet:ipv4', {}), { 'doc':", "'doc': 'The primary name for the ATT&CK software.', }), ('names',", "('url', ('inet:url', {}), { 'ro': True, 'doc': 'URL available on", "primary name for the ATT&CK technique.', }), ('status', ('it:mitre:attack:status', {}),", "}), ('isnow', ('it:mitre:attack:technique', {}), { 'doc': 'If deprecated, this field", "}), ('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), { 'doc': 'NIST NVD Common", "{}), {'ro': True, 'doc': 'The file distributed by the software.'}),", "process executing code that deleted the file.', }), ('host', ('it:host',", "that read the file. 
Typically the same host referenced in", "of the file path (parsed from :path).', }), ('path:ext', ('str',", "'A arbitrary, unversioned software product.', }), ('it:adid', ('str', {'lower': True,", "2.3 string.'}), ('other', ('str', {'lower': True, 'strip': True}), { 'ro':", "'doc': 'The time the memory map was created.', }), ('deleted',", "{ 'doc': 'The URL that documents the ATT&CK technique.', }),", "field may contain the current value for the technique.', }),", "'ro': True, 'doc': 'The \"part\" field from the CPE 2.3", "{ 'deprecated': True, 'doc': 'Web account of the software author.',", "[] genr = iter(text) try: while True: c = next(genr)", "}), )), ('it:group', {}, ( ('name', ('str', {'lower': True, 'strip':", "'uniq': True, 'sorted': True}), { 'doc': 'An array of alternate", "cpe:2.3: part : vendor : product : version : update", "a host getting a registry key.', }), ('it:exec:reg:set', ('guid', {}),", "file triggered on.' }), ('time', ('time', {}), { 'doc': 'The", "of the ATT&CK software.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "bind().' }), ('server:port', ('inet:port', {}), { 'doc': 'The bound (listening)", "as designated by a Common Vulnerabilities and Exposures (CVE) number.',", "'doc': 'A file on a host.' 
}), ('it:exec:file:add', ('guid', {}),", "('it:logon', ('guid', {}), { 'doc': 'A GUID that represents an", "host.', }), ('domain', ('it:domain', {}), { 'doc': 'The authentication domain", "for the POSIX account.', }), ('posix:home', ('file:path', {}), { 'doc':", "'cno.mitre.m0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc':", "'The host on which the activity occurred.'}), ('time', ('time', {}),", "authored the software.', }), ('author:org', ('ou:org', {}), { 'deprecated': True,", "the CPE 2.3 string.'}), )), ('it:sec:cwe', {}, ( ('name', ('str',", "field from the CPE 2.3 string.'}), )), ('it:sec:cwe', {}, (", "}), ('account', ('it:account', {}), { 'doc': 'The account that logged", "the client during the URL retrieval..' }), ('client:ipv6', ('inet:ipv6', {}),", "{ 'doc': 'Calls to imported library functions within the scope", "process.', }), )), ('it:exec:thread', {}, ( ('proc', ('it:exec:proc', {}), {", "'text'}, }), ('desc:short', ('str', {'lower': True}), { 'doc': 'A short", "IPv4 address specified to bind().' }), ('server:ipv6', ('inet:ipv6', {}), {", "'doc': 'An array of alternate names for the ATT&CK group.',", "modified the existing file.', }), ('host', ('it:host', {}), { 'doc':", "primary name for the ATT&CK group.', }), ('names', ('array', {'type':", "'doc': 'A developer selected integer constant.', }), ('it:dev:regkey', ('str', {}),", "URL.', }), ('host', ('it:host', {}), { 'doc': 'The host running", "the host.' }), ('latlong', ('geo:latlong', {}), { 'doc': 'The last", "rule evaluated as a match.'}), )), ('it:app:yara:procmatch', {}, ( ('rule',", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"version\" field", "'doc': 'A GUID that represents a group on a host", "'The signature that the file triggered on.' 
}), ('time', ('time',", "the mutex was created.', }), ('name', ('it:dev:mutex', {}), { 'doc':", "'doc': 'The registry key or value that was read.', }),", "{}), { 'doc': 'A brief description of the domain.', }),", "function.'}), ('va', ('int', {}), { 'doc': 'The virtual address of", "'doc': 'The org that operates the given host.', }), )),", "'The specific file containing code that created the mutex. May", "that produced the hit.'}), ('version', ('it:semver', {}), { 'doc': 'The", "the registry key, if the value is a string.', }),", "{}), { 'doc': 'The specific file containing code that read", "file on the host.', }), ('ctime', ('time', {}), { 'doc':", "and build information will be parsed out and available as", "{}, ( ('host', ('it:host', {}), { 'doc': 'The host containing", "}), ('desc', ('str', {}), { 'doc': 'The CWE description field.',", "'doc': 'A Mitre ATT&CK element status.', 'ex': 'current', }), ('it:mitre:attack:group',", "of the function.'}), ('description', ('str', {}), { 'doc': 'Notes concerning", "name = nodes[0].get('name') if name: await node.set('software:name', name) async def", "TCP port.' 
}), )), ('it:fs:file', {}, ( ('host', ('it:host', {}),", "('latlong', ('geo:latlong', {}), { 'doc': 'The last known location for", "that created the new file.', }), ('host', ('it:host', {}), {", "> s_version.mask60: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a integer larger", "'doc': 'Normalized version of the version string.', }), ('arch', ('it:dev:str',", "ATT&CK Tactic ID.', 'ex': 'TA0040', }), ('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}),", "file name (parsed from :path).', }), ('path:base', ('file:base', {}), {", "that matched the YARA rule.'}), ('time', ('time', {}), { 'doc':", "parts[12], } return ':'.join(parts), {'subs': subs} class SemVer(s_types.Int): ''' Provides", "{ 'doc': 'The file that was created.', }), )), ('it:exec:file:del',", "valu, info = self.core.model.type('it:semver').norm(valu) subs = info.get('subs') return valu, subs", "tactic.', 'ex': 'cno.mitre.ta0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}),", "'doc': 'The last known location for the host.' }), ('place',", "running the process that created the new file. Typically the", "('it:auth:passwdhash', {}, ( ('salt', ('hex', {}), { 'doc': 'The (optional)", "}), ('path:base', ('file:base', {}), { 'ro': True, 'doc': 'The final", "('vers:norm', ('str', {'lower': True}), { 'doc': 'Normalized version of the", "('inet:ipv4', {}), { 'doc': 'The last known ipv4 address for", "('version', ('it:semver', {}), { 'doc': 'The version of the rule", "registry was deleted.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The", "{ 'doc': 'The URL that was requested.', }), ('client', ('inet:client',", "on a host.' 
}), ('it:exec:file:add', ('guid', {}), { 'doc': 'An", "valu, subs async def _onFormItDevStr(self, node): await node.set('norm', node.ndef[1]) async", "return valu loglevels = ( (10, 'debug'), (20, 'info'), (30,", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The file", "{}), { 'doc': 'URL relevant for the software.', }), ('isos',", "'The process where the library was loaded.', }), ('va', ('int',", "('model', ('str', {}), { 'doc': 'The product model of the", "'The time the registry was read.', }), ('reg', ('it:dev:regval', {}),", "synapse.lib.module as s_module import synapse.lib.version as s_version logger = logging.getLogger(__name__)", "associated with this account.', }), ('host', ('it:host', {}), { 'doc':", "string. Returns: int, dict: The system normalized version integer and", "{ 'doc': 'A raw JSON record of the log event.',", "arbitrary, unversioned software product.', }), ('it:adid', ('str', {'lower': True, 'strip':", "account's home directory.\", 'ex': '/home/visi', }), ('posix:shell', ('file:path', {}), {", "{ 'doc': 'The file that was read.', }), )), ('it:exec:file:write',", "from the registry. May or may not be the same", "('lib', 'it:prod:softver'))}), { 'doc': 'A software version contains a library", "}), ('url', ('inet:url', {}), { 'doc': 'The URL that was", "technique.', }), )), ('it:mitre:attack:software', {}, ( ('software', ('it:prod:soft', {}), {", "such as a windows domain.' }), ('it:account', ('guid', {}), {", "('it:exec:proc', {}), { 'doc': 'The process where the memory was", "'A description of the ATT&CK mitigation.', 'disp': {'hint': 'text'}, }),", "the listening port. 
May or may not be the same", "name of the group.', }), ('desc', ('str', {}), { 'doc':", "on or served by a host or system.', }), ('it:sec:cve',", ")), ('it:av:sig', {}, ( ('soft', ('it:prod:soft', {}), { 'ro': True,", "reference URL for information about the signature.', }) )), ('it:av:filehit',", "True, 'strip': True, 'onespace': True}), { 'doc': 'The name of", "annotate nodes included in this ATT&CK software.', 'ex': 'cno.mitre.s0100', }),", "'The android app which requests the permission.'}), ('perm', ('it:os:android:perm', {}),", "'The specific file containing code that wrote to the registry.", "()), ('it:dev:regkey', {}, ()), ('it:dev:regval', {}, ( ('key', ('it:dev:regkey', {}),", "contains a library software version.'}), ('it:prod:softos', ('comp', {'fields': ( ('soft',", "risk:mitigation ('name', ('str', {'strip': True}), { 'doc': 'The primary name", "a specific version of the software is available from.', }),", "from :path).', }), ('path:base', ('file:base', {}), { 'ro': True, 'doc':", "interesting behavior.'}), ('complexity', ('int', {}), { 'doc': 'The complexity of", "('ps:contact', {}), { 'doc': 'Additional contact information associated with this", "file containing code that read the file. May or may", "process executing on a host. 
May be an actual (e.g.,", "technique.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", ": language : sw_edition : target_sw : target_hw : other", "host where the group is registered.', }), ('domain', ('it:domain', {}),", "software at a particular version.', }), ('names', ('array', {'type': 'it:dev:str',", "instance.', }), ('software:name', ('str', {'lower': True, 'strip': True}), { 'doc':", "{}), { 'doc': 'An instance of a host creating or", "('array', {'type': 'it:sec:cve', 'uniq': True, 'sorted': True}), { 'doc': 'A", "'doc': 'A log level integer that increases with severity.', }),", "any command line parameters.', 'disp': {'hint': 'text'}, }), ('pid', ('int',", "True, 'sorted': True}), { 'doc': 'An array of alternate names", "'deprecated': True, 'doc': 'Organization which authored the software.', }), ('author:acct',", "('comp', {'fields': ( ('soft', 'it:prod:softver'), ('file', 'file:bytes'))}), { 'doc': 'A", "'doc': 'The snort rule text.', 'disp': {'hint': 'text'}, }), ('name',", "'doc': 'The address of the client during the URL retrieval.'", "'doc': 'The file on the host.', }), ('ctime', ('time', {}),", "that deleted data from the registry. Typically the same host", "+= c except StopIteration: parts.append(part) return parts def _normPyStr(self, valu):", "default shell.\", 'ex': '/bin/bash', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc':", "value.', }), ('hash:ntlm', ('hash:ntlm', {}), { 'doc': 'The NTLM password", "'An android permission string.'}), ('it:os:android:intent', ('str', {}), { 'doc': 'An", "distributed by the software.'}), ('path', ('file:path', {}), { 'doc': 'The", "while True: c = next(genr) if c == '\\\\': c", "('int', {}), { 'doc': 'The virtual address of the first", "has {len(parts)} parts, expected 13.' 
raise s_exc.BadTypeValu(valu=valu, mesg=mesg) subs =", "'doc': 'A reference to a string inside a function.', }),", "intent which is broadcast by the app.'}), )), ('it:prod:softver', {},", "{}, ( ('soft', ('it:prod:soft', {}), { 'ro': True, 'doc': 'The", "{}), { 'doc': 'The source IPv4 address of the flow", "adding a file to a filesystem.', }), ('it:exec:file:del', ('guid', {}),", "{ 'doc': 'A Mitre ATT&CK Group ID.', 'ex': 'G0100', }),", "bruteforce parts try: valu, subs = self.bruteVersionStr(prop) await node.set('semver', valu)", "'doc': 'An advertising identification string.'}), ('it:os:windows:sid', ('str', {'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), {", "particular version.', }), ('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted':", "concatenation of the major, minor and patch levels. Prerelease and", "'doc': 'The app software which broadcasts the android intent.'}), ('intent',", "antivirus signature.' }), ('it:auth:passwdhash', ('guid', {}), { 'doc': 'An instance", "time the logon occured.', }), ('success', ('bool', {}), { 'doc':", "{ 'doc': 'A vulnerability as designated by a Common Vulnerabilities", "key, if the value is an integer.', }), ('bytes', ('file:bytes',", "an executable.', }), ('it:reveng:filefunc', ('comp', {'fields': (('file', 'file:bytes'), ('function', 'it:reveng:function'))}),", "YARA rule.'}), ('author', ('ps:contact', {}), { 'doc': 'Contact info for", "URL that documents the ATT&CK tactic.', }), ('tag', ('syn:tag', {}),", "when binding the port.' 
}), ('server:ipv4', ('inet:ipv4', {}), { 'doc':", "from the CPE 2.3 string.'}), ('other', ('str', {'lower': True, 'strip':", "CPE 2.3 Formatted String https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf (Section 6.2) cpe:2.3: part :", "'it:os:android:intent') )}), { 'doc': 'The given software broadcasts the given", "file was read.', }), ('path:dir', ('file:path', {}), { 'ro': True,", "of the version string.', }), ('arch', ('it:dev:str', {}), { 'doc':", "time the thread exited.', }), ('exitcode', ('int', {}), { 'doc':", "duration of the logon session.', }), ('client:host', ('it:host', {}), {", "address specified to bind().' }), ('server:port', ('inet:port', {}), { 'doc':", "of the signature.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}),", "'doc': 'A vulnerability as designated by a Common Vulnerabilities and", "string that the function references.'}), )), ('it:reveng:impfunc', {}, ()), ),", "from the CPE 2.3 string.'}), ('update', ('str', {'lower': True, 'strip':", "{}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The app", "name, info, opts): opts['lower'] = True s_types.Str.__init__(self, modl, name, info,", "technique IDs addressed by the mitigation.', }), )), ('it:dev:int', {},", ")), ('it:os:android:ibroadcast', {}, ( ('app', ('it:prod:softver', {}), {'ro': True, 'doc':", "\"The path to the POSIX account's default shell.\", 'ex': '/bin/bash',", "rule.'}), ('author', ('ps:contact', {}), { 'doc': 'Contact info for the", "('comp', {'fields': (('file', 'file:bytes'), ('sig', 'it:av:sig'))}), { 'doc': 'A file", "('inet:url', {}), { 'doc': 'URL relevant for the software.', }),", "= self.bruteVersionStr(prop) await node.set('semver', valu) for k, v in subs.items():", "True, 'doc': 'The app software which broadcasts the android intent.'}),", "{ 'doc': 'The process that matched the YARA rule.'}), ('time',", "the file was written to/modified.', }), ('path', ('file:path', {}), {", "'URL where a specific version of the software is available", 
"is an integer.', }), ('bytes', ('file:bytes', {}), { 'doc': 'The", "'The group owner of the file.', }), )), ('it:exec:file:add', {},", "Common Vulnerabilities and Exposures (CVE) number.', 'ex': 'cve-2012-0158' }), ('it:sec:cwe',", "{}), {'ro': True, 'doc': 'The android app which requests the", "'The software version is known to be compatible with the", "{ 'doc': 'The time the file was created.', }), ('path',", "'The host process which caused the activity.'}), ('thread', ('it:exec:thread', {}),", "{}), { 'doc': 'The URL that documents the ATT&CK software.',", "'doc': 'The file that matched the YARA rule.'}), ('version', ('it:semver',", "URLs that document the ATT&CK technique.', }), ('parent', ('it:mitre:attack:technique', {}),", "'it:reveng:impfunc'}), { 'doc': 'Calls to imported library functions within the", "view of a file.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc':", "'The YARA rule that matched the file.'}), ('file', ('file:bytes', {}),", "('va', ('int', {}), { 'doc': 'The virtual address of the", "field from the CPE 2.3 string.'}), ('edition', ('str', {'lower': True,", "requests the android permission.'}), ('it:os:android:ilisten', ('comp', {'fields': ( ('app', 'it:prod:soft'),", "{}), { 'doc': 'The host where the group is registered.',", "of the ATT&CK tactic.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url',", "the technique.', }), ('desc', ('str', {'strip': True}), { 'doc': 'A", "host.', }), ('operator', ('ps:contact', {}), { 'doc': 'The operator of", "'doc': \"The path to the POSIX account's home directory.\", 'ex':", "time the port was bound.', }), ('server', ('inet:server', {}), {", "('inet:ipv6', {}), { 'doc': 'The IPv6 address specified to bind().'", "'The file representing the value of the registry key, if", "'doc': 'The string that the function references.'}), )), ('it:reveng:impfunc', {},", "advertising identification string.'}), ('it:os:windows:sid', ('str', {'regex': 
r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc': 'A", "if it exhibits interesting behavior.'}), ('complexity', ('int', {}), { 'doc':", "{ 'ro': True, 'doc': 'The \"product\" field from the CPE", "log level integer that increases with severity.', }), ('data', ('data',", "('time', {}), { 'doc': 'The time the file was created.',", "{}), { 'doc': 'A raw JSON record of the log", "for this software.', }), ('desc', ('str', {}), { 'doc': 'A", "of an antivirus engine name.' }), ('it:av:filehit', ('comp', {'fields': (('file',", "out of the string') if subs: valu = s_version.packVersion(subs.get('major'), subs.get('minor',", "('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc': 'A Mitre ATT&CK Technique", "('it:sec:cwe', {}, ( ('name', ('str', {}), { 'doc': 'The CWE", "'The primary name for the ATT&CK technique.', }), ('status', ('it:mitre:attack:status',", "ATT&CK Software ID.', 'ex': 'S0154', }), ('it:dev:str', ('str', {}), {", "':': parts.append(part) part = '' continue part += c except", "('it:av:filehit', ('comp', {'fields': (('file', 'file:bytes'), ('sig', 'it:av:sig'))}), { 'doc': 'A", "('software', ('it:prod:soft', {}), { 'doc': 'Used to map an ATT&CK", "{}, ()), ('it:dev:pipe', {}, ()), ('it:dev:mutex', {}, ()), ('it:dev:regkey', {},", "triggered the signature hit.', }), ('sig', ('it:av:sig', {}), { 'doc':", "}), ('name', ('it:dev:pipe', {}), { 'doc': 'The named pipe string.',", "{}), { 'deprecated': True, 'doc': 'Web account of the software", "domain where the group is registered.', }), ('groups', ('array', {'type':", "'doc': 'The path to the executable which started the process.',", "the node.', }), ('os', ('it:prod:softver', {}), { 'doc': 'The operating", "name is available and set it if possible prop =", "}), ('it:exec:mutex', ('guid', {}), { 'doc': 'A mutex created by", "('it:mitre:attack:mitigation', {}, ( # TODO map to an eventual risk:mitigation", "text password for this password hash.', }), )), 
('it:cmd', {},", "= node.get('arch') if prop: await node.snap.addNode('it:dev:str', prop) async def _onPropSoftverVers(self,", "same :exe specified in :proc, if present.', }), ('time', ('time',", "given os software version.'}), ('it:hostsoft', ('comp', {'fields': (('host', 'it:host'), ('softver',", "account that logged in.', }), ('creds', ('auth:creds', {}), { 'doc':", "'doc': 'The specific file containing code that deleted data from", "True, 'doc': 'The YARA rule that matched the file.'}), ('file',", "}), ('bytes', ('file:bytes', {}), { 'doc': 'The file representing the", "that matched the file.'}), ('file', ('file:bytes', {}), { 'ro': True,", "('str', {'lower': True, 'strip': True}), { 'doc': 'The name of", "file access time.', }), ('user', ('inet:user', {}), { 'doc': 'The", "{ 'doc': 'Used to map an ATT&CK software to a", "{}), { 'doc': 'The main process executing code that wrote", "activity.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The host process which", "'The NIST CPE 2.3 string specifying this software version', }),", "loaded by that program.', }), ('cmd', ('it:cmd', {}), { 'doc':", "the ATT&CK group.', }), ('desc', ('str', {}), { 'doc': 'A", "'doc': 'An instance of a host deleting a file from", "('ou:org', {}), { 'doc': 'The org that operates the given", "()), ('it:dev:mutex', {}, ()), ('it:dev:regkey', {}, ()), ('it:dev:regval', {}, (", "{ 'doc': \"The path to the POSIX account's home directory.\",", "level integer that increases with severity.', }), ('data', ('data', {}),", "('sig', ('it:av:sig', {}), { 'doc': 'The signature that the file", "'A software version contains a library software version.'}), ('it:prod:softos', ('comp',", "{ 'doc': 'The primary name for the ATT&CK group.', }),", "'The time the file was read.', }), ('path', ('file:path', {}),", "rule.'}), ('version', ('it:semver', {}), { 'doc': 'The most recent version", "start with \"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) text, info = s_types.Str._normPyStr(self,", "within 
the scope of the function.', }), ('strings', ('array', {'type':", "self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self, valu): '''", "'doc': 'If deprecated, this field may contain the current value", "s_exc.BadTypeValu(valu=valu, mesg=mesg) text, info = s_types.Str._normPyStr(self, valu) parts = self._splitCpe23(text)", "was created.', }), ('path', ('file:path', {}), { 'doc': 'The path", "semver.') major, minor, patch = s_version.unpackVersion(valu) valu = s_version.packVersion(major, minor,", "True}), { 'ro': True, 'doc': 'The \"product\" field from the", "read the registry. May or may not be the same", "major, 'minor': minor, 'patch': patch} return valu, {'subs': subs} def", "('guid', {}), { 'doc': 'A specific version of a software", "array of ChildOf CWE Relationships.' 
}), )), ('it:mitre:attack:group', {}, (", "'The \"part\" field from the CPE 2.3 string.'}), ('vendor', ('ou:name',", "location string for the node.', }), ('os', ('it:prod:softver', {}), {", "name for the ATT&CK technique.', }), ('status', ('it:mitre:attack:status', {}), {", "{}), { 'doc': 'An android permission string.'}), ('it:os:android:intent', ('str', {}),", "'The CWE description field.', 'ex': 'Buffer Copy without Checking Size", "'vendor': parts[3], 'product': parts[4], 'version': parts[5], 'update': parts[6], 'edition': parts[7],", "{}, ()), ), } name = 'it' return ((name, modl),", "prop = node.get('vers') if not prop: return await node.set('vers:norm', prop)", "'doc': 'The host process which caused the activity.'}), ('thread', ('it:exec:thread',", "('str', {'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), { 'doc': 'A vulnerability as", "('created', ('time', {}), { 'doc': 'The time the memory map", "('str', {}), { 'doc': 'The YARA rule text.', 'disp': {'hint':", "{}), { 'doc': 'The process which killed this process.', }),", "this account.', }), ('host', ('it:host', {}), { 'doc': 'The host", "software IDs used by the group.', }), )), ('it:mitre:attack:tactic', {},", "that caused the hit.'}), ('src:port', ('inet:port', {}), { 'doc': 'The", "'The specific file containing code that created the named pipe.", "written to.', }), ('reg', ('it:dev:regval', {}), { 'doc': 'The registry", "referenced within the function.', }), )), ('it:reveng:filefunc', {}, ( ('function',", "('va', ('int', {}), { 'doc': 'The base memory address where", "for this software version.', }), ('cpe', ('it:sec:cpe', {}), { 'doc':", "('soft', 'it:prod:softver'), ('os', 'it:prod:softver'))}), { 'doc': 'The software version is", "bruteVersionStr(self, valu): ''' Brute force the version out of a", "{}), { 'doc': 'The time the logon session ended.', }),", "('lib', ('it:prod:softver', {}), {'ro': True, 'doc': 'The library software version.'}),", "to the file. 
Typically the same host referenced in :proc,", "'A version of a software product which is present on", "_normPyStr(self, valu): if not valu.startswith('cpe:2.3:'): mesg = 'CPE 2.3 string", "the process.', }), ('user', ('inet:user', {}), { 'doc': 'The user", "('name', ('str', {}), { 'doc': 'The name of the YARA", "{}), { 'doc': 'A memory mapped segment located in a", "triggering an alert on a specific antivirus signature.' }), ('it:auth:passwdhash',", "a host or network.' }), ('it:logon', ('guid', {}), { 'doc':", "the file was read.', }), ('path', ('file:path', {}), { 'doc':", "was deleted.', }), )), ('it:app:snort:rule', {}, ( ('text', ('str', {}),", "}), ('url', ('inet:url', {}), { 'ro': True, 'doc': 'URL available", "included in this ATT&CK mitigation.', 'ex': 'cno.mitre.m0100', }), ('references', ('array',", "'The \"update\" field from the CPE 2.3 string.'}), ('edition', ('str',", "'The \"edition\" field from the CPE 2.3 string.'}), ('language', ('str',", "oldv): # Set vers:norm and make it's normed valu prop", "}), )), ('it:av:prochit', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "person who authored the software.', }), ('author:org', ('ou:org', {}), {", "( ('part', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "created the named pipe. May or may not be the", "code that read the file. May or may not be", "which is broadcast by the app.'}), )), ('it:prod:softver', {}, (", "'The host running the process that requested the URL. Typically", "hit.'}), ('version', ('it:semver', {}), { 'doc': 'The version of the", "value for the technique.', }), ('desc', ('str', {'strip': True}), {", "specified in :proc, if present.', }), ('time', ('time', {}), {", "that matched the file.'}), ('flow', ('inet:flow', {}), { 'doc': 'The", "{ 'doc': 'The name of the network.', }), ('desc', ('str',", "time that the AV engine detected the signature.' 
}), )),", "stripping whitespace') subs = s_version.parseSemver(valu) if subs is None: raise", "'doc': 'The synapse tag used to annotate nodes included in", "an actual (e.g., endpoint) or virtual (e.g., malware sandbox) host.',", "that the account is a member of.', }), )), ('it:group',", "'A brief description of the group.', }), ('host', ('it:host', {}),", "}), ('created', ('time', {}), { 'doc': 'The time the thread", "('it:hostname', ('str', {'strip': True, 'lower': True}), { 'doc': 'The name", "('it:exec:file:add', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "{ 'doc': 'A Mitre ATT&CK Software ID.', 'ex': 'S0154', }),", "}), ('software:name', ('str', {'lower': True, 'strip': True}), { 'doc': 'The", "Args: valu (str): String to attempt to get version information", "'The process where the memory was mapped.', }), ('va', ('int',", "be the same :exe referenced in :proc, if present.', }),", "'doc': 'The place where the host resides.', }), ('loc', ('loc',", "'doc': 'The operator of the host.', }), ('org', ('ou:org', {}),", "('time', {}), { 'doc': 'The time the mutex was created.',", "which distributes the file.'}), ('file', ('file:bytes', {}), {'ro': True, 'doc':", "try: while True: c = next(genr) if c == '\\\\':", "{}), { 'ro': True, 'doc': 'The \"vendor\" field from the", "async def _onPropSoftverVers(self, node, oldv): # Set vers:norm and make", "file considered the \"main\" executable for the process. For example,", ":proc, if present.', }), ('exe', ('file:bytes', {}), { 'doc': 'The", "process which created the process.' 
}), ('killedby', ('it:exec:proc', {}), {", "self._normPyInt) def _normPyStr(self, valu): valu = valu.strip() if not valu:", "('string', ('str', {}), { 'ro': True, 'doc': 'The string that", "{'fields': ( ('soft', 'it:prod:softver'), ('os', 'it:prod:softver'))}), { 'doc': 'The software", "('name', ('str', {'lower': True, 'strip': True}), { 'doc': 'Name of", "'The contact information of the org or person who authored", "{}), { 'doc': 'The host process which caused the activity.'}),", "'The account that logged in.', }), ('creds', ('auth:creds', {}), {", "code that created the mutex. May or may not be", "('desc', ('str', {}), { 'doc': 'A free-form description of the", "'ro': True, 'doc': 'The anti-virus product which contains the signature.',", "instance of a host requesting a URL.', }), ('it:exec:bind', ('guid',", "{ 'doc': 'A short description of the software.', }), ('cpe',", "'doc': 'An array of ATT&CK technique IDs addressed by the", "{ 'doc': 'A brief description of the network.', }), ('org',", "a synapse it:prod:soft.', }), ('name', ('str', {'strip': True}), { 'doc':", "('it:semver', {}), { 'doc': 'The version of the rule at", "(60, 'crit'), (70, 'alert'), (80, 'emerg'), ) class ItModule(s_module.CoreModule): async", "code that created the named pipe. May or may not", "('str', {}), { 'doc': 'The serial number of the host.',", "exit code for the process.', }), ('user', ('inet:user', {}), {", "{}), { 'doc': 'The CWE description field.', 'ex': 'Buffer Copy", "it:dev:str for arch prop = node.get('arch') if prop: await node.snap.addNode('it:dev:str',", "the hit.'}), ('version', ('it:semver', {}), { 'doc': 'The version of", "on a specific antivirus signature.' }), ('it:auth:passwdhash', ('guid', {}), {", "{}, ( ('key', ('it:dev:regkey', {}), { 'doc': 'The Windows registry", "( ('name', ('str', {'strip': True}), { 'doc': 'The primary name", "represents a group on a host or network.' 
}), ('it:logon',", "an integer.', }), ('bytes', ('file:bytes', {}), { 'doc': 'The file", "registry key, if the value is binary data.', }), )),", "that the host is a member of.', }), ('ipv4', ('inet:ipv4',", "'A Mitre ATT&CK Mitigation ID.', 'ex': 'M1036', }), ('it:mitre:attack:software', ('str',", "library was loaded in the process.', }), ('loaded', ('time', {}),", "command string used to launch the process, including any command", "group.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", "{}), { 'doc': 'The virtual address of the first codeblock", "map to an eventual risk:mitigation ('name', ('str', {'strip': True}), {", "the log event.', }), )), ('it:domain', {}, ( ('name', ('str',", "('it:mitre:attack:technique', {}), { 'doc': 'The parent ATT&CK technique on this", "ordering. Prerelease information is disregarded for integer comparison purposes, as", "a library.'}), )), ('it:adid', {}, ()), ('it:os:ios:idfa', {}, ()), ('it:os:android:aaid',", "string.'}), ('sw_edition', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "deleting a file from a filesystem.', }), ('it:exec:file:read', ('guid', {}),", "subs} def _normPyInt(self, valu): if valu < 0: raise s_exc.BadTypeValu(valu=valu,", "opts['lower'] = True s_types.Str.__init__(self, modl, name, info, opts) def _splitCpe23(self,", "library software version.'}), ('it:prod:softos', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('os',", "{}), { 'doc': 'The host containing the file.', }), ('path',", "'doc': 'An array of ChildOf CWE Relationships.' 
}), )), ('it:mitre:attack:group',", "'True if the mmap is mapped with write permissions.', }),", "('array', {'type': 'it:sec:cwe', 'uniq': True, 'sorted': True, 'split': ','}), {", "as a semver.') major, minor, patch = s_version.unpackVersion(valu) valu =", "was created.', }), ('name', ('it:dev:mutex', {}), { 'doc': 'The mutex", "optionally be present in the axon.', }), )), ('it:exec:mutex', {},", "or person who authored the software.', }), ('author:org', ('ou:org', {}),", "by the app.'}), )), ('it:os:android:ibroadcast', {}, ( ('app', ('it:prod:softver', {}),", "Vulnerabilities and Exposures (CVE) number.', 'ex': 'cve-2012-0158' }), ('it:sec:cwe', ('str',", "deleted.', }), )), ('it:app:snort:rule', {}, ( ('text', ('str', {}), {", "the function.', }), )), ('it:reveng:funcstr', {}, ( ('function', ('it:reveng:function', {}),", "'doc': 'An instance of a host requesting a URL.', }),", "('exe', ('file:bytes', {}), { 'doc': 'The file considered the \"main\"", "ChildOf CWE Relationships.' }), )), ('it:mitre:attack:group', {}, ( ('org', ('ou:org',", "a negative integer as a semver.') if valu > s_version.mask60:", "for parsing a semantic version string into its component parts.", "group.', }), )), ('it:logon', {}, ( ('time', ('time', {}), {", "{'strip': True}), { 'doc': 'A description of the ATT&CK technique.',", "the file.'}), ('file', ('file:bytes', {}), { 'ro': True, 'doc': 'The", "{}), { 'doc': 'The NTLM password hash value.', }), ('passwd',", "('it:os:ios:idfa', ('it:adid', {}), { 'doc': 'An iOS advertising identification string.'}),", "GUID that represents an account on a host or network.'", "'uniq': True, 'sorted': True}), { 'doc': 'Associated names for the", "'The anti-virus product which contains the signature.', }), ('name', ('str',", "file containing code that created the new file. May or", "the host.' 
}), ('place', ('geo:place', {}), { 'doc': 'The place", "valu, {'subs': subs} def repr(self, valu): major, minor, patch =", "this version of the software was released.', }), ('semver', ('it:semver',", "the string') if subs: valu = s_version.packVersion(subs.get('major'), subs.get('minor', 0), subs.get('patch',", "('it:hosturl', {}, ( ('host', ('it:host', {}), { 'ro': True, 'doc':", "snort rule text.', 'disp': {'hint': 'text'}, }), ('name', ('str', {}),", "'props': ( ('exe', ('file:bytes', {}), { 'doc': 'The executable file", "this software.', }), ('author', ('ps:contact', {}), { 'doc': 'The contact", "virtual / notional host.', }), ('exe', ('file:bytes', {}), { 'doc':", "}), ('cpe', ('it:sec:cpe', {}), { 'doc': 'The NIST CPE 2.3", "modl, name, info, opts) def _splitCpe23(self, text): part = ''", "('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc': 'An array", "{ 'doc': 'The time the memory map was created.', }),", "directory of the file path (parsed from :path).', }), ('path:ext',", "Prerelease and build information will be parsed out and available", "'The signature that the file triggered on.' 
}), ('sig:name', ('str',", "','}), { 'doc': 'An array of techniques used by the", "prop}} nodes = await node.snap.nodes('it:prod:soft=$soft', opts=opts) if nodes: name =", "('it:host', {}), { 'doc': 'The sensor host node that produced", "matched the snort rule.'}), ('src', ('inet:addr', {}), { 'doc': 'The", "{}), { 'doc': 'The exit code for the process.', }),", "{'hint': 'text'}, }), ('pid', ('int', {}), { 'doc': 'The process", "{'strip': True}), { 'doc': 'A description of the ATT&CK software.',", "{ 'doc': 'The Microsoft Windows Security Identifier of the account.',", "'Additional contact information associated with this account.', }), ('host', ('it:host',", "and Exposures (CVE) number.', 'ex': 'cve-2012-0158' }), ('it:sec:cwe', ('str', {'regex':", "'The manufacturer of the host.', }), ('model', ('str', {}), {", "function.'}), ('funccalls', ('array', {'type': 'it:reveng:filefunc'}), { 'doc': 'Other function calls", "software version contains a library software version.'}), ('it:prod:softos', ('comp', {'fields':", "}), ('domain', ('it:domain', {}), { 'doc': 'The authentication domain where", "'The host that the account logged in to.', }), ('account',", "( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent'))}), { 'doc': 'The given software", "('proc', ('it:exec:proc', {}), { 'doc': 'The file that triggered the", "is a mapped view of a file.', }), ('hash:sha256', ('hash:sha256',", "URLs that document the ATT&CK mitigation.', }), ('addresses', ('array', {'type':", "file containing code that created the named pipe. May or", "account is a member of.', }), )), ('it:group', {}, (", "IPv6 of the client during the URL retrieval..' 
}), ('client:port',", "{'ro': True, 'doc': 'Host with the software.'}), ('softver', ('it:prod:softver', {}),", "a given host.', }), ('it:av:sig', ('comp', {'fields': (('soft', 'it:prod:soft'), ('name',", "('str', {'enums': 'current,deprecated,withdrawn'}), { 'doc': 'A Mitre ATT&CK element status.',", "URL that documents the ATT&CK technique.', }), ('tag', ('syn:tag', {}),", "('tag', ('syn:tag', {}), { 'doc': 'The synapse tag used to", "('path:ext', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "ID.', }), )), ('it:sec:cpe', {}, ( ('part', ('str', {'lower': True,", "('it:sec:cpe', {}, ( ('part', ('str', {'lower': True, 'strip': True}), {", "version of the rule evaluated as a match.'}), )), ('it:reveng:function',", "event.', 'interfaces': ('it:host:activity',), }), ('it:network', ('guid', {}), { 'doc': 'A", "named pipe. Typically the same host referenced in :proc, if", "}), ('pid', ('int', {}), { 'doc': 'The process ID.', }),", "where the account is registered.', }), ('posix:uid', ('int', {}), {", "True, 'doc': 'Email address of the sofware author.', }), ('author:person',", "}), ('posix:shell', ('file:path', {}), { 'doc': \"The path to the", "( ('app', ('it:prod:softver', {}), {'ro': True, 'doc': 'The android app", "of the server when binding the port.' 
}), ('server:ipv4', ('inet:ipv4',", "True}), { 'doc': 'The name of a host or system.',", "('inet:ipv6', {}), { 'doc': 'The destination IPv4 address of the", "{ 'doc': 'The registry key or value that was read.',", "the rule.'}), ('enabled', ('bool', {}), { 'doc': 'The rule enabled", "expected to start with \"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) text, info", "'An instance of a host adding a file to a", "data from the registry was deleted.', }), ('reg', ('it:dev:regval', {}),", "deleted.', }), )), ('it:exec:file:read', {}, ( ('proc', ('it:exec:proc', {}), {", "launch the process, including any command line parameters.', 'disp': {'hint':", "{ 'doc': 'The process which created the process.' }), ('killedby',", "android intent.'}), ('it:os:android:ibroadcast', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('intent', 'it:os:android:intent')", "'A Mitre ATT&CK Group ID.', 'ex': 'G0100', }), ('it:mitre:attack:tactic', ('str',", "'The file extension of the file name (parsed from :path).',", "file to a filesystem.', }), ('it:exec:file:del', ('guid', {}), { 'doc':", "('it:hostsoft', ('comp', {'fields': (('host', 'it:host'), ('softver', 'it:prod:softver'))}), { 'doc': 'A", "('src:exe', ('file:path', {}), { 'doc': 'The path to the executable", "of the flow that caused the hit.'}), ('time', ('time', {}),", "'The specific file containing code that created the new file.", "}), )), ('it:exec:file:del', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "}), ('it:os:ios:idfa', ('it:adid', {}), { 'doc': 'An iOS advertising identification", "'it:prod:softver'))}), { 'doc': 'The software version is known to be", "'doc': 'The path where the file was deleted.', }), ('path:dir',", "the file was created.', }), ('path', ('file:path', {}), { 'doc':", "'doc': 'An instance of a process triggering an alert on", "operating system.'}), ('islib', ('bool', {}), { 'doc': 'Set to True", "{}, ()), ('it:dev:regval', {}, ( ('key', ('it:dev:regkey', {}), { 'doc':", "{}), { 'doc': 'The 
time that the YARA engine matched", "{ 'doc': 'Groups that are a member of this group.',", "of a host adding a file to a filesystem.', }),", "the CVE ID.', }), )), ('it:sec:cpe', {}, ( ('part', ('str',", "individual logon/logoff event.' }), ('it:hosturl', ('comp', {'fields': (('host', 'it:host'), ('url',", "'doc': 'Timestamp for when this version of the software was", "loaded.', }), ('va', ('int', {}), { 'doc': 'The base memory", "{ 'doc': 'A Windows registry key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval',", "specified to bind().' }), ('server:port', ('inet:port', {}), { 'doc': 'The", "of the string') if subs: valu = s_version.packVersion(subs.get('major'), subs.get('minor', 0),", "specific file containing code that requested the URL. May or", "levels. Prerelease and build information will be parsed out and", "software.', }), ('desc', ('str', {'strip': True}), { 'doc': 'A description", "('inet:ipv6', {}), { 'doc': 'The IPv6 where the logon originated.',", "= self.core.model.type('it:semver').norm(valu) subs = info.get('subs') return valu, subs except s_exc.BadTypeValu:", "on a host or network.' 
}), ('it:logon', ('guid', {}), {", "the function.'}), ('rank', ('int', {}), { 'doc': 'The function rank", "{ 'doc': 'The path where the file was created.', }),", "# Make it:dev:str from version str await node.snap.addNode('it:dev:str', prop) #", "}), ('serial', ('str', {}), { 'doc': 'The serial number of", "target_hw : other * = \"any\" - = N/A '''", "('it:semver', {}), { 'doc': 'The most recent version of the", "that are a member of this group.', }), ('posix:gid', ('int',", "}), ('org', ('ou:org', {}), { 'doc': 'The org that owns/operates", "the network flow that caused the hit.'}), ('sensor', ('it:host', {}),", "{}), { 'doc': 'The path to the executable of the", "creating or setting a registry key.', }), ('it:exec:reg:del', ('guid', {}),", "of the snort rule.'}), ('version', ('it:semver', {}), { 'doc': 'The", "}), ('url', ('inet:url', {}), { 'doc': 'URL relevant for the", "containing code that read the registry. May or may not", "'ctors': ( ('it:semver', 'synapse.models.infotech.SemVer', {}, { 'doc': 'Semantic Version type.',", "'The given software broadcasts the given Android intent.'}), ('it:prod:softver', ('guid',", "a filesystem.', }), ('it:exec:file:read', ('guid', {}), { 'doc': 'An instance", "('str', {'regex': r'^S[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Software ID.',", "or virtual (e.g., malware sandbox) host.', }), ('it:exec:thread', ('guid', {}),", "host running the process that created the mutex. Typically the", "instance of a host getting a registry key.', }), ('it:exec:reg:set',", "('str', {}), { 'doc': 'The CWE description field.', 'ex': 'Buffer", "mutex. 
Typically the same host referenced in :proc, if present.',", "'ro': True, 'doc': 'The signature that the file triggered on.'", "software listens for an android intent.'}), ('it:os:android:ibroadcast', ('comp', {'fields': (", "'doc': 'The credentials that were used for the logon.', }),", "'A Windows registry key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval', ('guid', {}),", "{ 'doc': 'An array of ATT&CK tactics that include this", "network.' }), ('it:group', ('guid', {}), { 'doc': 'A GUID that", "('exited', ('time', {}), { 'doc': 'The time the process exited.',", "valu = valu.strip() if not valu: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No", "'doc': 'Observed/variant names for this software.', }), ('desc', ('str', {}),", "flow that caused the hit.'}), ('dst:ipv6', ('inet:ipv6', {}), { 'doc':", "'The time the registry was written to.', }), ('reg', ('it:dev:regval',", "segment located in a process.', }), ('it:cmd', ('str', {'strip': True}),", "'The file that was read.', }), )), ('it:exec:file:write', {}, (", "name of a host or system.', }), ('it:host', ('guid', {}),", ")), ('it:network', {}, ( ('name', ('str', {'lower': True, 'strip': True,", "semantic version string into its component parts. This normalizes a", "the mmap is mapped with read permissions.', }), ('perms:write', ('bool',", "('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 of the client", "listening port. 
Typically the same host referenced in :proc, if", "name=self.name, mesg='Cannot norm a negative integer as a semver.') if", "advertising identification string.'}), ('it:os:android:perm', ('str', {}), { 'doc': 'An android", "text.', }), ('severity', ('int', {'enums': loglevels}), { 'doc': 'A log", "a semver.') valu = s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch')) return valu, {'subs':", "'doc': 'The product model of the host.', }), ('serial', ('str',", "('it:exec:reg:del', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "{}, ()), ('it:os:android:intent', {}, ()), ('it:os:android:reqperm', {}, ( ('app', ('it:prod:softver',", "('posix:home', ('file:path', {}), { 'doc': \"The path to the POSIX", "software.', 'ex': 'cno.mitre.s0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}),", "'An array of URLs that document the ATT&CK mitigation.', }),", "{}), { 'doc': 'The time the file was written to/modified.',", "('geo:place', {}), { 'doc': 'The place where the host resides.',", "ATT&CK group.', }), ('desc', ('str', {}), { 'doc': 'A description", "executable.', }), ('it:reveng:funcstr', ('comp', {'fields': (('function', 'it:reveng:function'), ('string', 'str'))}), {", "can run on the operating system.'}), ('os', ('it:prod:softver', {}), {'ro':", "attempt.', }), ('logoff:time', ('time', {}), { 'doc': 'The time the", "out of a string. Args: valu (str): String to attempt", "{}), { 'doc': 'The authentication domain where the account is", "representing the value of the registry key, if the value", "of the group.', }), ('host', ('it:host', {}), { 'doc': 'The", "event.' }), ('it:hosturl', ('comp', {'fields': (('host', 'it:host'), ('url', 'inet:url'))}), {", "('it:app:yara:rule', {}), { 'ro': True, 'doc': 'The YARA rule that", "engine detected the signature.' 
}), )), ('it:auth:passwdhash', {}, ( ('salt',", "'doc': 'The function rank score used to evaluate if it", "}), )), ('it:dev:int', {}, ()), ('it:dev:pipe', {}, ()), ('it:dev:mutex', {},", ": update : edition : language : sw_edition : target_sw", "( ('name', ('str', {}), { 'doc': 'The CWE description field.',", "referenced in :proc, if present.', }), ('exe', ('file:bytes', {}), {", "('file:bytes', {}), { 'doc': 'The file that was deleted.', }),", "SHA512 password hash value.', }), ('hash:lm', ('hash:lm', {}), { 'doc':", "'doc': 'The anti-virus product which contains the signature.', }), )),", "{}), { 'doc': 'An instance of a host getting a", "path where the file was written to/modified.', }), ('path:dir', ('file:path',", "time the file was created.', }), ('path', ('file:path', {}), {", "('it:exec:reg:get', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The main", "network.', }), ('desc', ('str', {}), { 'doc': 'A brief description", "the memory map in bytes.', }), ('perms:read', ('bool', {}), {", "Input (Classic Buffer Overflow)', }), ('desc', ('str', {}), { 'doc':", "network flow that caused the hit.'}), ('sensor', ('it:host', {}), {", "resides.', }), ('loc', ('loc', {}), { 'doc': 'The geo-political location", "{ 'doc': 'A Mitre ATT&CK element status.', 'ex': 'current', }),", "= 'CPE 2.3 string is expected to start with \"cpe:2.3:\"'", "during the URL retrieval..' }), ('client:port', ('inet:port', {}), { 'doc':", "created the new file.', }), ('host', ('it:host', {}), { 'doc':", "constant.', }), ('it:dev:regkey', ('str', {}), { 'doc': 'A Windows registry", "wrote to the registry. 
May or may not be the", "'The path to the executable of the process.', }), ('src:exe',", "minor, patch = s_version.unpackVersion(valu) valu = s_version.fmtVersion(major, minor, patch) return", "bound.', }), ('server', ('inet:server', {}), { 'doc': 'The inet:addr of", "}) )), ('it:av:filehit', {}, ( ('file', ('file:bytes', {}), { 'ro':", "next(genr) if c == ':': parts.append(part) part = '' continue", "'doc': 'The \"part\" field from the CPE 2.3 string.'}), ('vendor',", "software.', }), ('author', ('ps:contact', {}), { 'doc': 'The contact information", "and make it's normed valu prop = node.get('vers') if not", "'info'), (30, 'notice'), (40, 'warning'), (50, 'err'), (60, 'crit'), (70,", "groups that the account is a member of.', }), )),", "executing code that wrote to the registry.', }), ('host', ('it:host',", "{}), { 'doc': 'The primary name for the ATT&CK group.',", "'The signature name.' }), ('desc', ('str', {}), { 'doc': 'A", "function.', }), ('it:reveng:impfunc', ('str', {'lower': 1}), { 'doc': 'A function", "pipe.', }), ('it:dev:mutex', ('str', {}), { 'doc': 'A string representing", "'doc': 'An instance of a host binding a listening port.',", "'The file distributed by the software.'}), ('path', ('file:path', {}), {", ") class ItModule(s_module.CoreModule): async def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr)", "('file:bytes', {}), { 'doc': 'The file representing the value of", "the library was unloaded.', }), ('path', ('file:path', {}), { 'doc':", "('it:av:sig', ('comp', {'fields': (('soft', 'it:prod:soft'), ('name', ('str', {'lower': True})))}), {", "synapse.exc as s_exc import synapse.lib.types as s_types import synapse.lib.module as", "('ou:org', {}), { 'doc': 'The org that owns/operates the network.',", "await node.set('software:name', 
name) async def _onPropSoftverArch(self, node, oldv): # make", "the flow that caused the hit.'}), ('dst:ipv6', ('inet:ipv6', {}), {", "}), ('tag', ('syn:tag', {}), { 'doc': 'The synapse tag used", "code that read the registry.', }), ('host', ('it:host', {}), {", "version.', }), ('cpe', ('it:sec:cpe', {}), { 'doc': 'The NIST CPE", "match to a process.', }), ('it:app:snort:rule', ('guid', {}), { 'doc':", "if not valu: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='No text left after", "from the CPE 2.3 string.'}), ('target_hw', ('str', {'lower': True, 'strip':", "('it:dev:int', {}), { 'doc': 'The value of the registry key,", "to annotate nodes included in this ATT&CK software.', 'ex': 'cno.mitre.s0100',", "loaded in the process.', }), ('loaded', ('time', {}), { 'doc':", "{ 'doc': 'The status of this ATT&CK technique.', }), ('isnow',", "represents an account on a host or network.' }), ('it:group',", "document the ATT&CK group.', }), ('techniques', ('array', {'type': 'it:mitre:attack:technique', 'uniq':", "snort rule hit.', }), ('it:reveng:function', ('guid', {}), { 'doc': 'A", "of a software product which is present on a given", ")), ('it:mitre:attack:technique', {}, ( ('name', ('str', {'strip': True}), { 'doc':", "an eventual risk:mitigation ('name', ('str', {'strip': True}), { 'doc': 'The", "Security Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa', ('it:adid', {}), { 'doc':", "file from a filesystem.', }), ('it:exec:file:read', ('guid', {}), { 'doc':", ")), ('it:exec:file:write', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "{ 'doc': 'The name of the host or system.', }),", "self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self, valu): ''' Brute force the", "','}), { 'doc': 'An array of ATT&CK tactics that include", 
"True}), { 'ro': True, 'doc': 'The \"edition\" field from the", "valu = s_version.packVersion(subs.get('major'), subs.get('minor', 0), subs.get('patch', 0)) return valu, subs", "was unloaded.', }), ('path', ('file:path', {}), { 'doc': 'The path", "{ 'doc': 'Semver prerelease string.', }), ('semver:build', ('str', {}), {", "description of the ATT&CK software.', 'disp': {'hint': 'text'}, }), ('url',", "'The specific file containing code that requested the URL. May", "('flow', ('inet:flow', {}), { 'doc': 'The inet:flow that matched the", "'\\\\': c += next(genr) if c == ':': parts.append(part) part", "{ 'doc': 'A brief description of the group.', }), ('host',", "of ATT&CK software IDs used by the group.', }), )),", "{ 'doc': 'If deprecated, this field may contain the current", "hash value.', }), ('passwd', ('inet:passwd', {}), { 'doc': 'The (optional)", "is the bitwise concatenation of the major, minor and patch", "file that was read.', }), )), ('it:exec:file:write', {}, ( ('proc',", "}), ('posix:uid', ('int', {}), { 'doc': 'The user ID of", "{}), { 'doc': 'An external process which created the thread.',", "'doc': 'Semantic Version type.', }), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, { 'doc':", "key/value pair.', }), ('it:prod:soft', ('guid', {}), { 'doc': 'A arbitrary,", "# Try doing version part extraction by noming through the", "{ 'doc': 'The time the port was bound.', }), ('server',", "}), ('exe', ('file:bytes', {}), { 'doc': 'The specific file containing", "mesg = 'CPE 2.3 string is expected to start with", "caused the hit.'}), ('src:port', ('inet:port', {}), { 'doc': 'The source", "executing in a process.', }), ('it:exec:loadlib', ('guid', {}), { 'doc':", "time the memory map was created.', }), ('deleted', ('time', {}),", "('guid', {}), { 'doc': 'A library load event in a", "free-form description of the CVE vulnerability.', 'disp': {'hint': 'text'}, }),", "document the ATT&CK software.', }), ('techniques', ('array', {'type': 
'it:mitre:attack:technique', 'uniq':", "{}), { 'doc': 'The account that logged in.', }), ('creds',", "'doc': 'Set to false to indicate an unsuccessful logon attempt.',", "'doc': 'An android intent string.'}), ('it:os:android:reqperm', ('comp', {'fields': ( ('app',", "('inet:addr', {}), { 'doc': 'The source address of flow that", "{}), { 'ro': True, 'doc': 'URL available on the host.',", "is None: raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr', mesg='Unable to brute force version", "signature hit.', }), ('sig', ('it:av:sig', {}), { 'ro': True, 'doc':", "True}), { 'doc': 'An array of alternate names for the", "name of the host or system.', }), ('desc', ('str', {}),", "}), ('src:proc', ('it:exec:proc', {}), { 'doc': 'The process which created", "('hash:ntlm', ('hash:ntlm', {}), { 'doc': 'The NTLM password hash value.',", "map in bytes.', }), ('perms:read', ('bool', {}), { 'doc': 'True", "ID.', 'ex': 'TA0040', }), ('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc':", "{ 'doc': 'The time the library was unloaded.', }), ('path',", "the given host.', }), )), ('it:log:event', {}, ( ('mesg', ('str',", "'The last known ipv4 address for the host.' }), ('latlong',", "write permissions.', }), ('perms:execute', ('bool', {}), { 'doc': 'True if", "self.core.model.type('it:semver').norm(valu) subs = info.get('subs') return valu, subs except s_exc.BadTypeValu: #", "version str await node.snap.addNode('it:dev:str', prop) # form the semver properly", "'doc': 'The last known ipv4 address for the host.' 
}),", "that was read.', }), )), ('it:exec:reg:set', {}, ( ('proc', ('it:exec:proc',", "that the function references.'}), )), ('it:reveng:impfunc', {}, ()), ), }", "from the CPE 2.3 string.'}), ('language', ('str', {'lower': True, 'strip':", "version of the it:dev:str.', }), )), ('it:sec:cve', {}, ( ('desc',", "reference to a string inside a function.', }), ('it:reveng:impfunc', ('str',", "version', }), ('cves', ('array', {'type': 'it:sec:cve', 'uniq': True, 'sorted': True}),", "'The main process executing code that created the new file.',", "('str', {}), { 'doc': 'A description of the ATT&CK tactic.',", "{ 'doc': 'A description of the ATT&CK tactic.', 'disp': {'hint':", "genr = iter(text) try: while True: c = next(genr) if", "synapse ou:org.', }), ('name', ('ou:name', {}), { 'doc': 'The primary", "hit.'}), ('src:ipv6', ('inet:ipv6', {}), { 'doc': 'The source IPv6 address", "'The time the port was bound.', }), ('server', ('inet:server', {}),", "{}), { 'doc': 'The snort rule text.', 'disp': {'hint': 'text'},", "author.', }), ('author:person', ('ps:person', {}), { 'deprecated': True, 'doc': 'Person", "{}), { 'doc': 'The file that triggered the signature hit.',", "}), ('path', ('file:path', {}), { 'doc': 'The path for the", "}), ('tactics', ('array', {'type': 'it:mitre:attack:tactic', 'uniq': True, 'sorted': True, 'split':", "('it:auth:passwdhash', ('guid', {}), { 'doc': 'An instance of a password", "('comp', {'fields': (('host', 'it:host'), ('softver', 'it:prod:softver'))}), { 'doc': 'A version", "{ 'ro': True, 'doc': 'The anti-virus product which contains the", ")), ('it:sec:cpe', {}, ( ('part', ('str', {'lower': True, 'strip': True}),", "it:prod:soft.', }), ('name', ('str', {'strip': True}), { 'doc': 'The primary", "that represents an individual logon/logoff event.' 
}), ('it:hosturl', ('comp', {'fields':", "'The time the logon session ended.', }), ('host', ('it:host', {}),", "the sofware author.', }), ('author:person', ('ps:person', {}), { 'deprecated': True,", "name for the ATT&CK tactic.', }), ('desc', ('str', {}), {", "semver.') if valu > s_version.mask60: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm", "'The time the library was unloaded.', }), ('path', ('file:path', {}),", "{}), {'ro': True, 'doc': 'The software which distributes the file.'}),", "'doc': 'The time the thread was created.', }), ('exited', ('time',", "}), ('it:app:snort:rule', ('guid', {}), { 'doc': 'A snort rule unique", "without Checking Size of Input (Classic Buffer Overflow)', }), ('desc',", "to this software version.', }), ('vers', ('it:dev:str', {}), { 'doc':", "# form the semver properly or bruteforce parts try: valu,", "= { 'part': parts[2], 'vendor': parts[3], 'product': parts[4], 'version': parts[5],", "('soft', ('it:prod:softver', {}), {'ro': True, 'doc': 'The software version that", "{ 'doc': 'The CWE description field.', 'ex': 'Buffer Copy without", "that contains the library.'}), ('lib', ('it:prod:softver', {}), {'ro': True, 'doc':", "host. May be an actual (e.g., endpoint) or virtual (e.g.,", "the function.'}), ('file', ('file:bytes', {}), { 'ro': True, 'doc': 'The", "_onPropSoftverArch(self, node, oldv): # make it:dev:str for arch prop =", "0: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a negative integer as", "'current,deprecated,withdrawn'}), { 'doc': 'A Mitre ATT&CK element status.', 'ex': 'current',", "a function.', }), ('it:reveng:impfunc', ('str', {'lower': 1}), { 'doc': 'A", "'cno.mitre.ta0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc':", "host running the process that requested the URL. 
Typically the", "same :exe referenced in :proc, if present.', }), ('time', ('time',", "vulnerability as designated by a Common Vulnerabilities and Exposures (CVE)", "'The time the URL was requested.', }), ('url', ('inet:url', {}),", "r'^S[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Software ID.', 'ex': 'S0154',", "field from the CPE 2.3 string.'}), ('target_hw', ('str', {'lower': True,", "}), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 where the", "registry key.', 'ex': 'HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Run', }), ('it:dev:regval', ('guid', {}), { 'doc':", "{ 'doc': 'A GUID representing an individual log event.', 'interfaces':", "primary group ID of the account.', 'ex': '1001', }), ('posix:gecos',", "mesg='Unable to brute force version parts out of the string')", "and configuration such as a windows domain.' }), ('it:account', ('guid',", "that documents the ATT&CK software.', }), ('tag', ('syn:tag', {}), {", "process executing code that wrote to the registry.', }), ('host',", "path where the file was deleted.', }), ('path:dir', ('file:path', {}),", "True, 'doc': 'The final component of the file path (parsed", "{ 'ro': True, 'doc': 'The file that triggered the signature", "the \"main\" executable for the process. For example, rundll32.exe may", "references.'}), )), ('it:reveng:impfunc', {}, ()), ), } name = 'it'", "}), ('str', ('it:dev:str', {}), { 'doc': 'The value of the", "}), ('hash:md5', ('hash:md5', {}), { 'doc': 'The MD5 password hash", "{'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc': 'The URL", "listens for an android intent.'}), ('it:os:android:ibroadcast', ('comp', {'fields': ( ('app',", "ATT&CK tactics that include this technique.', }), )), ('it:mitre:attack:software', {},", "on the host.', }), ('ctime', ('time', {}), { 'doc': 'The", "port. 
May or may not be the same :exe specified", "( ('name', ('str', {'lower': True, 'strip': True, 'onespace': True}), {", "group.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse tag", "{'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc': 'A reference", "string.', }), ('url', ('inet:url', {}), { 'doc': 'URL where a", "Mitigation ID.', 'ex': 'M1036', }), ('it:mitre:attack:software', ('str', {'regex': r'^S[0-9]{4}$'}), {", "'The \"version\" field from the CPE 2.3 string.'}), ('update', ('str',", "the signature.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "containing code that bound the listening port. May or may", "a process.', }), ('it:cmd', ('str', {'strip': True}), { 'doc': 'A", "{}), { 'ro': True, 'doc': 'Host serving a url.', }),", "tag used to annotate nodes included in this ATT&CK technique.',", "authentication domain where the group is registered.', }), ('groups', ('array',", "}), ('sig:soft', ('it:prod:soft', {}), { 'ro': True, 'doc': 'The anti-virus", "( ('proc', ('it:exec:proc', {}), { 'doc': 'The process which contains", "a host or system.', }), ('it:host', ('guid', {}), { 'doc':", "expected 13.' 
raise s_exc.BadTypeValu(valu=valu, mesg=mesg) subs = { 'part': parts[2],", "{ 'doc': 'System normalized semantic version number.', }), ('semver:major', ('int',", "'doc': 'Host serving a url.', }), ('url', ('inet:url', {}), {", ")), ('it:cmd', {}, ()), ('it:exec:proc', {}, ( ('host', ('it:host', {}),", "flow that caused the hit.'}), ('src:ipv6', ('inet:ipv6', {}), { 'doc':", "software.', }), ('names', ('array', {'type': 'str', 'uniq': True, 'sorted': True}),", "2.3 string is expected to start with \"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu,", "address of the flow that caused the hit.'}), ('dst:port', ('inet:port',", "file.'}), ('flow', ('inet:flow', {}), { 'doc': 'The inet:flow that matched", "True}), { 'ro': True, 'doc': 'The \"version\" field from the", "host getting a registry key.', }), ('it:exec:reg:set', ('guid', {}), {", "integer as a semver.') if valu > s_version.mask60: raise s_exc.BadTypeValu(valu=valu,", "be an actual (e.g., endpoint) or virtual (e.g., malware sandbox)", "('it:os:android:intent', ('str', {}), { 'doc': 'An android intent string.'}), ('it:os:android:reqperm',", "'The NTLM password hash value.', }), ('passwd', ('inet:passwd', {}), {", "started the process.', }), ('src:proc', ('it:exec:proc', {}), { 'doc': 'The", "{ 'doc': 'An android advertising identification string.'}), ('it:os:android:perm', ('str', {}),", "of a host deleting a registry key.', }), ('it:app:yara:rule', ('guid',", "{ 'doc': 'The authentication domain where the account is registered.',", "{}), { 'doc': 'A developer-selected string.' }), ('it:dev:pipe', ('str', {}),", "'file:bytes'))}), { 'doc': 'A file is distributed by a specific", "bind().' 
}), ('server:ipv6', ('inet:ipv6', {}), { 'doc': 'The IPv6 address", "('it:prod:softver', ('guid', {}), { 'doc': 'A specific version of a", "the file.', }), ('host', ('it:host', {}), { 'doc': 'The host", "}), ('name', ('ou:name', {}), { 'doc': 'The primary name for", "self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self, valu): ''' Brute force the version out", "'The name of the snort rule.'}), ('version', ('it:semver', {}), {", "file.', }), ('path', ('file:path', {}), { 'doc': 'The path for", "valu = s_version.fmtVersion(major, minor, patch) return valu loglevels = (", "self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self, valu): ''' Brute force", "('it:mitre:attack:mitigation', ('str', {'regex': r'^M[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Mitigation", "time that the YARA engine matched the process to the", "postTypeInit(self): s_types.Int.postTypeInit(self) self.setNormFunc(str, self._normPyStr) self.setNormFunc(int, self._normPyInt) def _normPyStr(self, valu): valu", "who authored the software.', }), ('author:org', ('ou:org', {}), { 'deprecated':", "{}, ( ('software', ('it:prod:soft', {}), { 'doc': 'Software associated with", "('operator', ('ps:contact', {}), { 'doc': 'The operator of the host.',", "None: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Unable to parse string as a", "ATT&CK Group ID.', 'ex': 'G0100', }), ('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}),", "configuration such as a windows domain.' }), ('it:account', ('guid', {}),", "{'lower': True}), { 'ro': True, 'doc': 'The signature name.' }),", "signature.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), { 'doc':", "file containing code that created the mutex. 
May or may", "'Set to True if the software is an operating system.'}),", "of the file name (parsed from :path).', }), ('path:base', ('file:base',", "string.'}), ('it:os:android:reqperm', ('comp', {'fields': ( ('app', 'it:prod:soft'), ('perm', 'it:os:android:perm'))}), {", "host.', }), ('org', ('ou:org', {}), { 'doc': 'The org that", "{}), { 'doc': 'A mutex created by a process at", "to True if the software is an operating system.'}), ('islib',", "which broadcasts the android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro': True,", ")), ('it:exec:pipe', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The", "('hash:sha256', ('hash:sha256', {}), { 'doc': 'The SHA256 password hash value.',", "'The SHA256 password hash value.', }), ('hash:sha512', ('hash:sha512', {}), {", "{ 'doc': 'The start time for the process.', }), ('exited',", "{ 'doc': 'The file representing the value of the registry", "purposes, as we cannot map an arbitrary pre-release version into", "{}, ( ('name', ('str', {'lower': True, 'strip': True}), { 'doc':", "'doc': 'A function from an imported library.', }), ), 'interfaces':", "CPE 2.3 string specifying this software.', }), ('author', ('ps:contact', {}),", "created this thread.', }), )), ('it:exec:loadlib', {}, ( ('proc', ('it:exec:proc',", "'synapse.models.infotech.SemVer', {}, { 'doc': 'Semantic Version type.', }), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str',", "windows domain.' 
}), ('it:account', ('guid', {}), { 'doc': 'A GUID", "}), ('path', ('file:path', {}), { 'doc': 'The path where the", "the host resides.', }), ('loc', ('loc', {}), { 'doc': 'The", "parameters.', 'disp': {'hint': 'text'}, }), ('pid', ('int', {}), { 'doc':", "the rule evaluated as a match.'}), )), ('it:reveng:function', {}, (", "if the software is a library.'}), )), ('it:adid', {}, ()),", "('time', ('time', {}), { 'doc': 'The start time for the", "optional contiguous IPv6 address range of this network.', }), )),", "process where the memory was mapped.', }), ('va', ('int', {}),", "{ 'doc': 'The primary group ID of the account.', 'ex':", "Make it:dev:str from version str await node.snap.addNode('it:dev:str', prop) # form", "registry key or value that was deleted.', }), )), ('it:app:snort:rule',", "}), ('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, { 'doc': 'A NIST CPE 2.3", "mitigation.', }), )), ('it:dev:int', {}, ()), ('it:dev:pipe', {}, ()), ('it:dev:mutex',", "('string', 'str'))}), { 'deprecated': True, 'doc': 'A reference to a", "the same :exe specified in :proc, if present.'}), ('time', ('time',", "registered.', }), ('domain', ('it:domain', {}), { 'doc': 'The authentication domain", "}), ('sig', ('it:av:sig', {}), { 'ro': True, 'doc': 'The signature", "('time', {}), { 'doc': 'The time the named pipe was", "True, 'doc': 'Software on the host.'}) )), ('it:av:sig', {}, (", "{ 'doc': 'The time the URL was requested.', }), ('url',", "'The IPv6 of the client during the URL retrieval..' }),", "{}, ( ('part', ('str', {'lower': True, 'strip': True}), { 'ro':", "'Host serving a url.', }), ('url', ('inet:url', {}), { 'ro':", "to be used for YARA evaluation engines.'}), )), ('it:app:yara:match', {},", "('it:exec:proc', {}), { 'doc': 'The process where the library was", "}), ('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 where the", "log event.', 'interfaces': ('it:host:activity',), }), ('it:network', ('guid', {}), { 'doc':", "of the memory map. 
Bytes may optionally be present in", "the URL retrieval..' }), )), ('it:exec:bind', {}, ( ('proc', ('it:exec:proc',", "('semver:patch', ('int', {}), { 'doc': 'Version patch number.', }), ('semver:pre',", "of the software at a particular version.', }), ('names', ('array',", "}), ('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc': 'An", "('it:exec:mutex', ('guid', {}), { 'doc': 'A mutex created by a", "host or system.', }), ('desc', ('str', {}), { 'doc': 'A", "rule.'}), ('version', ('it:semver', {}), { 'doc': 'The current version of", "{}), { 'doc': 'The start time for the process.', }),", "unsuccessful logon attempt.', }), ('logoff:time', ('time', {}), { 'doc': 'The", "app which requests the permission.'}), ('perm', ('it:os:android:perm', {}), {'ro': True,", "'ex': 'G0100', }), ('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}), { 'doc': 'A", "= s_version.unpackVersion(valu) valu = s_version.packVersion(major, minor, patch) subs = {'major':", "{}), { 'doc': 'The IPv6 of the client during the", "logon.', }), ('duration', ('duration', {}), { 'doc': 'The duration of", "system which the software can run on.'}), )), ('it:os:android:ilisten', {},", "the hit.'}), ('src:port', ('inet:port', {}), { 'doc': 'The source port", "software which broadcasts the android intent.'}), ('intent', ('it:os:android:intent', {}), {'ro':", "Notes: This first attempts to parse strings using the it:semver", "'doc': 'The specific file containing code that created the new", "if present.', }), ('time', ('time', {}), { 'doc': 'The time", "{}), { 'doc': 'The destination address of the trigger.'}), ('dst:ipv4',", "{'regex': r'^S-1-[0-59]-\\d{2}-\\d{8,10}-\\d{8,10}-\\d{8,10}-[1-9]\\d{3}$'}), { 'doc': 'A Microsoft Windows Security Identifier.', 'ex':", "version.'}), ('it:hostsoft', ('comp', {'fields': (('host', 'it:host'), ('softver', 'it:prod:softver'))}), { 'doc':", "identification string.'}), ('it:os:android:aaid', ('it:adid', {}), { 'doc': 'An android advertising", 
"'The time of the network flow that caused the hit.'}),", "True, 'doc': 'Host with the software.'}), ('softver', ('it:prod:softver', {}), {'ro':", "'A mutex created by a process at runtime.', }), ('it:exec:pipe',", "the process.', }), ('size', ('int', {}), { 'doc': 'The size", "username associated with the account', }), ('contact', ('ps:contact', {}), {", "that program.', }), ('cmd', ('it:cmd', {}), { 'doc': 'The command", "('it:exec:proc', {}), { 'doc': 'The process that matched the YARA", "}), ('hash:lm', ('hash:lm', {}), { 'doc': 'The LM password hash", "'doc': \"The path to the POSIX account's default shell.\", 'ex':", "('it:exec:proc', ('guid', {}), { 'doc': 'A process executing on a", "'part': parts[2], 'vendor': parts[3], 'product': parts[4], 'version': parts[5], 'update': parts[6],", "created the new file. May or may not be the", "requests the permission.'}), ('perm', ('it:os:android:perm', {}), {'ro': True, 'doc': 'The", "('inet:port', {}), { 'doc': 'The source port of the flow", "'The name of the group.', }), ('desc', ('str', {}), {", "}), ('exe', ('file:bytes', {}), { 'doc': 'The file considered the", "{}), { 'doc': 'The host on which the activity occurred.'}),", "'doc': 'The source port of the flow that caused the", "{'type': 'it:group'}), { 'doc': 'An array of groups that the", "('file:base', {}), { 'ro': True, 'doc': 'The final component of", "('str', ('it:dev:str', {}), { 'doc': 'The value of the registry", "valu): major, minor, patch = s_version.unpackVersion(valu) valu = s_version.fmtVersion(major, minor,", "same :exe specified in :proc, if present.'}), ('time', ('time', {}),", "{}), { 'doc': 'A brief description of the network.', }),", "'doc': 'An array of ATT&CK software IDs used by the", "short description of the software.', }), ('cpe', ('it:sec:cpe', {}), {", "('str', {}), { 'doc': 'An android intent string.'}), ('it:os:android:reqperm', ('comp',", "('it:app:yara:procmatch', ('guid', {}), { 'doc': 'An instance of a YARA", "make it:dev:str for 
arch prop = node.get('arch') if prop: await", "{}), { 'doc': 'A arbitrary, unversioned software product.', }), ('it:adid',", "('domain', ('it:domain', {}), { 'doc': 'The authentication domain that the", "'doc': 'The given software broadcasts the given Android intent.'}), ('it:prod:softver',", "the rule.'}), ('version', ('it:semver', {}), { 'doc': 'The most recent", "('it:dev:regval', {}, ( ('key', ('it:dev:regkey', {}), { 'doc': 'The Windows", "host deleting a registry key.', }), ('it:app:yara:rule', ('guid', {}), {", "('it:account', {}, ( ('user', ('inet:user', {}), { 'doc': 'The username", "new file.', }), ('host', ('it:host', {}), { 'doc': 'The host", "}), )), ('it:exec:pipe', {}, ( ('proc', ('it:exec:proc', {}), { 'doc':", "and available as strings if that information is present. '''", "_onPropSoftverVers(self, node, oldv): # Set vers:norm and make it's normed", "that increases with severity.', }), ('data', ('data', {}), { 'doc':", "{}), { 'doc': 'Software associated with this version instance.', }),", "s_version.unpackVersion(valu) valu = s_version.fmtVersion(major, minor, patch) return valu loglevels =", "software.', }), ('cpe', ('it:sec:cpe', {}), { 'doc': 'The NIST CPE", "'The thread which created this thread.', }), )), ('it:exec:loadlib', {},", "to start with \"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) text, info =", "hit.'}), ('dst', ('inet:addr', {}), { 'doc': 'The destination address of", "ATT&CK mitigation.', 'ex': 'cno.mitre.m0100', }), ('references', ('array', {'type': 'inet:url', 'uniq':", "('it:exec:thread', {}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The process", "('host', ('it:host', {}), { 'doc': 'The host containing the file.',", "of the string. 
Returns: int, dict: The system normalized version", "'doc': 'Person who authored the software.', }), ('url', ('inet:url', {}),", "}), ('cmd', ('it:cmd', {}), { 'doc': 'The command string used", "port was bound.', }), ('server', ('inet:server', {}), { 'doc': 'The", "{}), { 'doc': 'A logical boundary of authentication and configuration", "{}), { 'doc': 'The executable file which caused the activity.'}),", "written to/modified.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc':", "True, 'doc': 'The \"part\" field from the CPE 2.3 string.'}),", "of the function.'}), ('funccalls', ('array', {'type': 'it:reveng:filefunc'}), { 'doc': 'Other", "'ro': True, 'doc': 'The \"update\" field from the CPE 2.3", "through the string subs = s_version.parseVersionParts(valu) if subs is None:", "{ 'doc': 'The snort rule text.', 'disp': {'hint': 'text'}, }),", "is mapped with write permissions.', }), ('perms:execute', ('bool', {}), {", "'uniq': True, 'sorted': True}), { 'doc': 'A list of CVEs", "(optional) clear text password for this password hash.', }), )),", "not be the same :exe specified in :proc, if present.'}),", "('url', ('inet:url', {}), { 'doc': 'A URL linking this CWE", "async def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft)", "continue part += c except StopIteration: parts.append(part) return parts def", "specific file containing code that wrote to the registry. 
May", "('inet:flow', {}), { 'doc': 'The inet:flow that matched the snort", "('user', ('inet:user', {}), { 'doc': 'The user name of the", "in a process.', }), ('it:exec:mmap', ('guid', {}), { 'doc': 'A", "'text'}, }), ('name', ('str', {}), { 'doc': 'The name of", "process.', }), ('size', ('int', {}), { 'doc': 'The size of", "}), ('it:app:yara:match', ('comp', {'fields': (('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}), { 'doc':", "}), ('desc', ('str', {}), { 'doc': 'A free-form description of", "bits. The comparable integer value representing the semver is the", "'The time the thread was created.', }), ('exited', ('time', {}),", "account.', }), ('posix:home', ('file:path', {}), { 'doc': \"The path to", "hash of the memory map. Bytes may optionally be present", "string specifying this software.', }), ('author', ('ps:contact', {}), { 'doc':", "node, oldv): # make it:dev:str for arch prop = node.get('arch')", "{ 'doc': 'A GUID that represents an account on a", "rule enabled status to be used for YARA evaluation engines.'}),", "that the activity started.'}), ), }), ), 'forms': ( ('it:hostname',", "'True if the mmap is mapped with read permissions.', }),", "password hash value.', }), ('hash:ntlm', ('hash:ntlm', {}), { 'doc': 'The", "a logical network.' }), ('it:domain', ('guid', {}), { 'doc': 'A", "that executed the process. 
May be an actual or a", "doing version part extraction by noming through the string subs", "host or system.', }), ('it:host', ('guid', {}), { 'doc': 'A", "rank score used to evaluate if it exhibits interesting behavior.'}),", "the file.'}), )), ('it:hostsoft', {}, ( ('host', ('it:host', {}), {'ro':", "('int', {}), { 'doc': 'A developer selected integer constant.', }),", "'The host where the account is registered.', }), ('domain', ('it:domain',", "hit.'}), ('src:port', ('inet:port', {}), { 'doc': 'The source port of", "rule at the time of match.'}), )), ('it:app:yara:rule', {}, (", "# make it:dev:str for arch prop = node.get('arch') if prop:", "oldv): # make it:dev:str for arch prop = node.get('arch') if", "the memory map was created.', }), ('deleted', ('time', {}), {", "}), ('it:app:yara:rule', ('guid', {}), { 'doc': 'A YARA rule unique", "('it:dev:pipe', ('str', {}), { 'doc': 'A string representing a named", "Copy without Checking Size of Input (Classic Buffer Overflow)', }),", "'doc': 'The bound (listening) TCP port.' }), )), ('it:fs:file', {},", "{}), { 'doc': 'The optional contiguous IPv6 address range of", "a process at runtime.', }), ('it:exec:url', ('guid', {}), { 'doc':", "a host or network.' }), ('it:group', ('guid', {}), { 'doc':", "the software can run on.'}), )), ('it:os:android:ilisten', {}, ( ('app',", "'A GUID that represents a logical network.' }), ('it:domain', ('guid',", "{ 'doc': 'The destination IPv4 address of the flow that", "code that bound the listening port. 
May or may not", "'An android intent string.'}), ('it:os:android:reqperm', ('comp', {'fields': ( ('app', 'it:prod:soft'),", "valu): valu = valu.strip() if not valu: raise s_exc.BadTypeValu(valu=valu, name=self.name,", "('part', ('str', {'lower': True, 'strip': True}), { 'ro': True, 'doc':", "deleted the file.', }), ('host', ('it:host', {}), { 'doc': 'The", "Security Identifier of the account.', }), ('groups', ('array', {'type': 'it:group'}),", "('time', {}), { 'doc': 'The time the URL was requested.',", "'TA0040', }), ('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), { 'doc': 'A Mitre", "('inet:ipv4', {}), { 'doc': 'The destination IPv4 address of the", "}), ('loaded', ('time', {}), { 'doc': 'The time the library", "('it:exec:url', ('guid', {}), { 'doc': 'An instance of a host", "the value is an integer.', }), ('bytes', ('file:bytes', {}), {", "{}), { 'doc': 'A free-form description of the host.', }),", "logon occured.', }), ('success', ('bool', {}), { 'doc': 'Set to", "subs = self.bruteVersionStr(prop) await node.set('semver', valu) for k, v in", "('rule', ('it:app:yara:rule', {}), { 'ro': True, 'doc': 'The YARA rule", "{}), { 'doc': 'The time the port was bound.', }),", "('file:bytes', {}), { 'doc': 'The file considered the \"main\" executable", "the host is a member of.', }), ('ipv4', ('inet:ipv4', {}),", "imported library functions within the scope of the function.', }),", "which is present on a given host.', }), ('it:av:sig', ('comp',", "is registered.', }), ('domain', ('it:domain', {}), { 'doc': 'The authentication", "{ 'doc': 'The time the process exited.', }), ('exitcode', ('int',", "'strip': True}), { 'ro': True, 'doc': 'The \"version\" field from", "'doc': 'The MD5 password hash value.', }), ('hash:sha1', ('hash:sha1', {}),", "'The authentication domain that the host is a member of.',", "'strip': True}), { 'ro': True, 'doc': 'The \"update\" field from", "( (10, 'debug'), (20, 'info'), (30, 'notice'), (40, 'warning'), (50,", 
"{}), { 'doc': 'The product model of the host.', }),", "# Check to see if name is available and set", "('guid', {}), { 'doc': 'An instance of a snort rule", "number.', }), ('semver:major', ('int', {}), { 'doc': 'Version major number.',", "'A brief description of the domain.', }), ('org', ('ou:org', {}),", "self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch) self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers) self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft) def bruteVersionStr(self, valu): ''' Brute", "brief description of the domain.', }), ('org', ('ou:org', {}), {", "('it:host', ('guid', {}), { 'doc': 'A GUID that represents a", "os software version.'}), ('it:hostsoft', ('comp', {'fields': (('host', 'it:host'), ('softver', 'it:prod:softver'))}),", "snort rule.'}), ('version', ('it:semver', {}), { 'doc': 'The current version", "'doc': 'A specific version of a software product.'}), ('it:prod:softfile', ('comp',", "as a match.'}), )), ('it:app:yara:procmatch', {}, ( ('rule', ('it:app:yara:rule', {}),", "{}), { 'doc': 'A URL linking this CVE to a", "class ItModule(s_module.CoreModule): async def initCoreModule(self): self.model.form('it:dev:str').onAdd(self._onFormItDevStr) self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr) self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr) self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch)", "{'lower': True}), { 'ro': True, 'doc': 'The signature name.', }),", "for the author of the YARA rule.'}), ('version', ('it:semver', {}),", "= '' continue part += c except StopIteration: parts.append(part) return", "('int', {}), { 'doc': 'The GECOS field for the POSIX", "{}), { 'doc': 'The IPv6 where the logon originated.', }),", "of the host.' 
}), ('manu', ('str', {}), { 'doc': 'The", "deprecated, this field may contain the current value for the", "logon session.', }), ('client:host', ('it:host', {}), { 'doc': 'The host", "{ 'doc': 'Associated names for the ATT&CK software.', }), ('desc',", "documents the ATT&CK mitigation.', }), ('tag', ('syn:tag', {}), { 'doc':", "async def _onPropSoftverArch(self, node, oldv): # make it:dev:str for arch", "'The SHA1 password hash value.', }), ('hash:sha256', ('hash:sha256', {}), {", "the listening port. Typically the same host referenced in :proc,", "Microsoft Windows Security Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa', ('it:adid', {}),", "'ex': 'Buffer Copy without Checking Size of Input (Classic Buffer", "{}, ( ('software', ('it:prod:soft', {}), { 'doc': 'Used to map", "IPv4 address of the flow that caused the hit.'}), ('dst:ipv6',", "{}), {'ro': True, 'doc': 'The android intent which is broadcast", "the activity.'}), ('host', ('it:host', {}), { 'doc': 'The host on", "host binding a listening port.', }), ('it:fs:file', ('guid', {}), {", "free-form description of the signature.', 'disp': {'hint': 'text'}, }), ('url',", "'The URL that documents the ATT&CK software.', }), ('tag', ('syn:tag',", "{ 'doc': 'The group owner of the file.', }), )),", "{}), { 'doc': 'The process where the memory was mapped.',", "major, minor, patch = s_version.unpackVersion(valu) valu = s_version.packVersion(major, minor, patch)", "{'lower': True, 'strip': True}), { 'ro': True, 'doc': 'The \"part\"", "software can run on.'}), )), ('it:os:android:ilisten', {}, ( ('app', ('it:prod:softver',", "'cno.mitre.t0100', }), ('references', ('array', {'type': 'inet:url', 'uniq': True}), { 'doc':", "('dst:ipv6', ('inet:ipv6', {}), { 'doc': 'The destination IPv4 address of", "import synapse.lib.version as s_version logger = logging.getLogger(__name__) class Cpe23Str(s_types.Str): '''", "c += next(genr) if c == ':': parts.append(part) part =", "software is a 
library.'}), )), ('it:adid', {}, ()), ('it:os:ios:idfa', {},", "'uniq': True}), { 'doc': 'An array of URLs that document", "host running the process that bound the listening port. Typically", "'ex': '1001', }), ('windows:sid', ('it:os:windows:sid', {}), { 'doc': 'The Microsoft", "software version.'}), ('it:prod:softos', ('comp', {'fields': ( ('soft', 'it:prod:softver'), ('os', 'it:prod:softver'))}),", "code that wrote to the registry. May or may not", "data from the registry. May or may not be the", "'doc': 'Semver prerelease string.', }), ('semver:build', ('str', {}), { 'doc':", ")), ('it:app:yara:rule', {}, ( ('text', ('str', {}), { 'doc': 'The", "field from the CPE 2.3 string.'}), ('other', ('str', {'lower': True,", "('url', ('inet:url', {}), { 'doc': 'URL relevant for the software.',", "the host.'}) )), ('it:av:sig', {}, ( ('soft', ('it:prod:soft', {}), {", "'file:bytes'))}), { 'doc': 'A YARA rule match to a file.',", "hash value.', }), ('hash:sha256', ('hash:sha256', {}), { 'doc': 'The SHA256", "}), ('name', ('str', {'strip': True}), { 'doc': 'The primary name", "as strings if that information is present. ''' def postTypeInit(self):", "running the process that wrote to the file. Typically the", "the mutex.', }), ('host', ('it:host', {}), { 'doc': 'The host", "'doc': 'A memory mapped segment located in a process.', }),", "at runtime.', }), ('it:exec:url', ('guid', {}), { 'doc': 'An instance", "'doc': 'The final component of the file path (parsed from", "port during the URL retrieval..' 
}), )), ('it:exec:bind', {}, (", "file.'}), ('proc', ('it:exec:proc', {}), { 'doc': 'The process that matched", "{}), { 'doc': 'True if the mmap is mapped with", "array of strings referenced within the function.', }), )), ('it:reveng:filefunc',", "('path', ('file:path', {}), { 'doc': 'The file path if the", "('softver', 'it:prod:softver'))}), { 'doc': 'A version of a software product", "asyncio import logging import synapse.exc as s_exc import synapse.lib.types as", "True, 'doc': 'The operating system which the software can run", "relevant for the software.', }), ('isos', ('bool', {}), { 'doc':", "(Classic Buffer Overflow)', }), ('desc', ('str', {}), { 'doc': 'The", "'doc': 'The file that was read.', }), )), ('it:exec:file:write', {},", "that apply to this software version.', }), ('vers', ('it:dev:str', {}),", "{}), { 'doc': 'The LM password hash value.', }), ('hash:ntlm',", "CVEs that apply to this software version.', }), ('vers', ('it:dev:str',", "'doc': 'The time the URL was requested.', }), ('url', ('inet:url',", "the rule at the time of match.'}), )), ('it:app:yara:rule', {},", "of this network.', }), ('net6', ('inet:net6', {}), { 'doc': 'The", "registry key, if the value is a string.', }), ('int',", "'The registry key or value that was written to.', }),", "'doc': 'The name of the YARA rule.'}), ('author', ('ps:contact', {}),", "the software is a library.'}), )), ('it:adid', {}, ()), ('it:os:ios:idfa',", "in :proc, if present.', }), ('time', ('time', {}), { 'doc':", "software version is known to be compatible with the given", "the named pipe. 
May or may not be the same", "{'ro': True, 'doc': 'The operating system which the software can", "Identifier.', 'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555', }), ('it:os:ios:idfa', ('it:adid', {}), { 'doc': 'An", "('client:ipv4', ('inet:ipv4', {}), { 'doc': 'The IPv4 where the logon", "{ 'doc': 'The SHA512 password hash value.', }), ('hash:lm', ('hash:lm',", "ATT&CK mitigation.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse", "noming through the string subs = s_version.parseVersionParts(valu) if subs is", "group.', }), ('posix:gid', ('int', {}), { 'doc': 'The primary group", "the same :exe specified in :proc, if present.', }), ('time',", "ATT&CK software.', 'disp': {'hint': 'text'}, }), ('url', ('inet:url', {}), {", "group.', }), )), ('it:mitre:attack:tactic', {}, ( ('name', ('str', {'strip': True}),", "{'lower': True})))}), { 'doc': 'A signature name within the namespace", "file that contains the function.'}), ('va', ('int', {}), { 'doc':", "wrote to the file. May or may not be the", "of the network flow that caused the hit.'}), ('sensor', ('it:host',", "host.' 
}), ('latlong', ('geo:latlong', {}), { 'doc': 'The last known", "('time', {}), { 'doc': 'The time the memory map was", "process.', }), ('user', ('inet:user', {}), { 'doc': 'The user name", "credentials that were used for the logon.', }), ('duration', ('duration',", "'A description of the ATT&CK technique.', 'disp': {'hint': 'text'}, }),", "(('file', 'file:bytes'), ('function', 'it:reveng:function'))}), { 'doc': 'An instance of a", "'sorted': True}), { 'doc': 'Observed/variant names for this software version.',", "signature.', }), ('name', ('str', {'lower': True}), { 'ro': True, 'doc':", "string.'}), ('it:os:android:intent', ('str', {}), { 'doc': 'An android intent string.'}),", "}), ('it:app:snort:hit', ('guid', {}), { 'doc': 'An instance of a", "of the function.', }), )), ('it:reveng:funcstr', {}, ( ('function', ('it:reveng:function',", "main process executing code that bound the listening port.', }),", "ATT&CK group.', }), ('tag', ('syn:tag', {}), { 'doc': 'The synapse", "name of the network.', }), ('desc', ('str', {}), { 'doc':", "the host or system.', }), ('desc', ('str', {}), { 'doc':", "{ 'doc': 'The process where the library was loaded.', }),", "( ('name', ('str', {}), { 'doc': 'The name of the", "to calculate the password hash.', }), ('hash:md5', ('hash:md5', {}), {", "'doc': 'The destination port of the flow that caused the", "('file', 'file:bytes'))}), { 'doc': 'A YARA rule match to a", "software is an operating system.'}), ('islib', ('bool', {}), { 'doc':", "served by a host or system.', }), ('it:sec:cve', ('str', {'lower':", "to annotate nodes included in this ATT&CK group ID.', 'ex':", "('name', ('ou:name', {}), { 'doc': 'The primary name for the", "{}), { 'doc': 'The URL that documents the ATT&CK technique.',", "for the host.' }), ('place', ('geo:place', {}), { 'doc': 'The", "arch prop = node.get('arch') if prop: await node.snap.addNode('it:dev:str', prop) async", "the URL retrieval..' 
}), ('client:ipv6', ('inet:ipv6', {}), { 'doc': 'The", "version parts out of the string') if subs: valu =", "software to a synapse it:prod:soft.', }), ('name', ('str', {'strip': True}),", "True, 'doc': 'The parent directory of the file path (parsed", "self._splitCpe23(text) if len(parts) != 13: mesg = f'CPE 2.3 string", "process.', }), ('exited', ('time', {}), { 'doc': 'The time the", "('int', {}), { 'doc': 'The exit code for the process.',", "('file:bytes', {}), { 'doc': 'The executable file which caused the", "of a host or system.', }), ('it:host', ('guid', {}), {", "('str', {'lower': True, 'strip': True}), { 'doc': 'Name of the", "'doc': 'The main process executing code that deleted data from", "{}, ( ('rule', ('it:app:snort:rule', {}), { 'doc': 'The snort rule", "{}), { 'doc': 'An instance of a host deleting a", "last known location for the host.' }), ('place', ('geo:place', {}),", "('it:dev:int', ('int', {}), { 'doc': 'A developer selected integer constant.',", "{ 'ro': True, 'doc': 'The \"version\" field from the CPE", "of match.'}), )), ('it:app:yara:rule', {}, ( ('text', ('str', {}), {", "of a software product.'}), ('it:prod:softfile', ('comp', {'fields': ( ('soft', 'it:prod:softver'),", "information is disregarded for integer comparison purposes, as we cannot", "'interfaces': ( ('it:host:activity', { 'props': ( ('exe', ('file:bytes', {}), {", "match.'}), )), ('it:reveng:function', {}, ( ('name', ('str', {}), { 'doc':", "True, 'doc': 'The android intent which is listened for by", "= nodes[0].get('name') if name: await node.set('software:name', name) async def _onPropSoftverArch(self,", "Exception: logger.exception('Failed to brute force version string [%s]', prop) def", "True, 'sorted': True}), { 'doc': 'Observed/variant names for this software", "parts[8], 'sw_edition': parts[9], 'target_sw': parts[10], 'target_hw': parts[11], 'other': parts[12], }", "{}, ( ('proc', ('it:exec:proc', {}), { 'doc': 'The file that", "Mitre ATT&CK Software ID.', 'ex': 'S0154', 
}), ('it:dev:str', ('str', {}),", "Windows Security Identifier of the group.', }), )), ('it:logon', {},", "('str', {'strip': True}), { 'doc': 'The primary name for the", "== ':': parts.append(part) part = '' continue part += c", "('str', {}), { 'doc': 'The name of the snort rule.'}),", "{ 'doc': 'The account that logged in.', }), ('creds', ('auth:creds',", "}), )), ('it:reveng:funcstr', {}, ( ('function', ('it:reveng:function', {}), { 'ro':", "'The org that operates the given domain.', }), )), ('it:network',", "{ 'ctors': ( ('it:semver', 'synapse.models.infotech.SemVer', {}, { 'doc': 'Semantic Version", "that deleted the file. Typically the same host referenced in", "of the domain.', }), ('desc', ('str', {}), { 'doc': 'A", "True if the software is an operating system.'}), ('islib', ('bool',", "('it:exec:proc', {}), { 'doc': 'The file that triggered the signature", "'A memory mapped segment located in a process.', }), ('it:cmd',", "Technique ID.', 'ex': 'T1548', }), ('it:mitre:attack:mitigation', ('str', {'regex': r'^M[0-9]{4}$'}), {", "a file.', }), ('it:app:yara:procmatch', ('guid', {}), { 'doc': 'An instance", "True}), { 'ro': True, 'doc': 'The \"target_hw\" field from the", "('parent', ('it:mitre:attack:technique', {}), { 'doc': 'The parent ATT&CK technique on", "out and available as strings if that information is present.", "to/modified.', }), ('path', ('file:path', {}), { 'doc': 'The path where", "'ro': True, 'doc': 'The \"version\" field from the CPE 2.3", "('inet:email', {}), { 'deprecated': True, 'doc': 'Email address of the", "password hash value.', }), ('passwd', ('inet:passwd', {}), { 'doc': 'The", "host referenced in :proc, if present.', }), ('exe', ('file:bytes', {}),", "'it:dev:str', 'uniq': True}), { 'doc': 'An array of strings referenced", "{ 'doc': 'A URL linking this CWE to a full", "('app', 'it:prod:soft'), ('perm', 'it:os:android:perm'))}), { 'doc': 'The given software requests", "info for the author of the YARA rule.'}), ('version', 
('it:semver',", "comparable integer value representing the semver is the bitwise concatenation", "if valu > s_version.mask60: raise s_exc.BadTypeValu(valu=valu, name=self.name, mesg='Cannot norm a", "{ 'doc': 'The time the thread was created.', }), ('exited',", "to the registry.', }), ('host', ('it:host', {}), { 'doc': 'The", "'err'), (60, 'crit'), (70, 'alert'), (80, 'emerg'), ) class ItModule(s_module.CoreModule):", "nodes included in this ATT&CK technique.', 'ex': 'cno.mitre.t0100', }), ('references',", "the server when binding the port.' }), ('server:ipv4', ('inet:ipv4', {}),", "'The exit code or return value for the thread.', }),", "{}), { 'doc': 'The manufacturer of the host.', }), ('model',", "'The given software listens for an android intent.'}), ('it:os:android:ibroadcast', ('comp',", "Check to see if name is available and set it", "authentication domain that the host is a member of.', }),", "{'ro': True, 'doc': 'The library software version.'}), )), ('it:prod:softfile', {},", "to a full description.', }), ('references', ('array', {'type': 'inet:url', 'uniq':", "{ 'props': ( ('exe', ('file:bytes', {}), { 'doc': 'The executable", "{ 'doc': 'An instance of a function in an executable.',", "{}), { 'doc': 'The path to the executable which started", "major, minor, patch = s_version.unpackVersion(valu) valu = s_version.fmtVersion(major, minor, patch)", "{'regex': r'^M[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Mitigation ID.', 'ex':", "host or system.' 
}), ('it:log:event', ('guid', {}), { 'doc': 'A", "used for the logon.', }), ('duration', ('duration', {}), { 'doc':", "GUID representing an individual log event.', 'interfaces': ('it:host:activity',), }), ('it:network',", "'doc': 'The main process executing code that created the mutex.',", "the same :exe referenced in :proc, if present.', }), ('time',", "a file to a filesystem.', }), ('it:exec:reg:get', ('guid', {}), {", "version of the software was released.', }), ('semver', ('it:semver', {}),", "of this ATT&CK technique.', }), ('isnow', ('it:mitre:attack:technique', {}), { 'doc':", "ou:org.', }), ('name', ('ou:name', {}), { 'doc': 'The primary name", "('inet:client', {}), { 'doc': 'The address of the client during", "('str', {'regex': r'^TA[0-9]{4}$'}), { 'doc': 'A Mitre ATT&CK Tactic ID.',", "file.', }), ('path:dir', ('file:path', {}), { 'ro': True, 'doc': 'The", "Common Weaknesses Enumeration Specification', 'ex': 'CWE-120', }), ('it:mitre:attack:status', ('str', {'enums':", "}), ('path:ext', ('str', {'lower': True, 'strip': True}), { 'ro': True,", "requested by the app.'}), )), ('it:prod:softos', {}, ( ('soft', ('it:prod:softver',", "the memory map was deleted.', }), ('path', ('file:path', {}), {", "synapse.lib.types as s_types import synapse.lib.module as s_module import synapse.lib.version as", "for the host.' 
}), ('latlong', ('geo:latlong', {}), { 'doc': 'The", "or system.', }), ('it:host', ('guid', {}), { 'doc': 'A GUID", "was deleted.', }), ('path', ('file:path', {}), { 'doc': 'The file", "2.3 string.'}), ('product', ('str', {'lower': True, 'strip': True}), { 'ro':", "('host', ('it:host', {}), {'ro': True, 'doc': 'Host with the software.'}),", "sw_edition : target_sw : target_hw : other * = \"any\"", "'inet:url'))}), { 'doc': 'A url hosted on or served by", "True, 'strip': True}), { 'ro': True, 'doc': 'The \"other\" field", "{ 'doc': 'The time the registry was read.', }), ('reg',", "'doc': 'Software on the host.'}) )), ('it:av:sig', {}, ( ('soft',", "port.', }), ('host', ('it:host', {}), { 'doc': 'The host running", "org that owns/operates the network.', }), ('net4', ('inet:net4', {}), {", "'The host running the process that bound the listening port.", "URL that documents the ATT&CK mitigation.', }), ('tag', ('syn:tag', {}),", "{ 'doc': 'The time the named pipe was created.', }),", "True}), { 'doc': 'Observed/variant names for this software.', }), ('desc',", "raw JSON record of the log event.', }), )), ('it:domain',", "with \"cpe:2.3:\"' raise s_exc.BadTypeValu(valu=valu, mesg=mesg) text, info = s_types.Str._normPyStr(self, valu)", "'target_sw': parts[10], 'target_hw': parts[11], 'other': parts[12], } return ':'.join(parts), {'subs':", "or served by a host or system.', }), ('it:sec:cve', ('str',", "a Common Vulnerabilities and Exposures (CVE) number.', 'ex': 'cve-2012-0158' }),", "'it:reveng:function'), ('string', 'str'))}), { 'deprecated': True, 'doc': 'A reference to", "{ 'doc': 'Lower case normalized version of the it:dev:str.', }),", "'doc': 'An instance of a host deleting a registry key.',", "identification string.'}), ('it:os:android:perm', ('str', {}), { 'doc': 'An android permission", "{ 'doc': 'A memory mapped segment located in a process.',", "('it:reveng:funcstr', {}, ( ('function', ('it:reveng:function', {}), { 'ro': True, 'doc':", "{ 'doc': 'An external 
process which created the thread.', }),", "file was written to/modified.', }), ('path:dir', ('file:path', {}), { 'ro':", "an operating system.'}), ('islib', ('bool', {}), { 'doc': 'Set to", "= s_types.Str._normPyStr(self, valu) parts = self._splitCpe23(text) if len(parts) != 13:", "await node.snap.addNode('it:dev:str', pprop) async def _onPropSoftverSoft(self, node, oldv): # Check", "}), ('it:exec:file:write', ('guid', {}), { 'doc': 'An instance of a", "{}), { 'doc': 'The duration of the logon session.', }),", "string.', }), ('arch', ('it:dev:str', {}), { 'doc': 'Software architecture.', }),", "the same host referenced in :proc, if present.', }), ('exe',", "flow that caused the hit.'}), ('sensor', ('it:host', {}), { 'doc':", "if the value is binary data.', }), )), ('it:prod:soft', {},", "URLs that document the CVE ID.', }), )), ('it:sec:cpe', {},", "('host', ('it:host', {}), { 'doc': 'The host where the group", "YARA rule unique identifier.', }), ('it:app:yara:match', ('comp', {'fields': (('rule', 'it:app:yara:rule'),", "tag used to annotate nodes included in this ATT&CK software.',", "value is a string.', }), ('int', ('it:dev:int', {}), { 'doc':", "the data from the registry was deleted.', }), ('reg', ('it:dev:regval',", "('guid', {}), { 'doc': 'A Windows registry key/value pair.', }),", "{ 'doc': 'The owner of the file.', }), ('group', ('inet:user',", "patch = s_version.unpackVersion(valu) valu = s_version.fmtVersion(major, minor, patch) return valu", "string.', }), )), ('it:exec:pipe', {}, ( ('proc', ('it:exec:proc', {}), {", "{ 'doc': 'The user ID of the account.', 'ex': '1001'," ]
[ "class TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\" ignore_routes = ('/static/<path:filename>',) ignore_end_patterns = ('>',)", "\"\"\"Smoke test routes to ensure no runtime errors..\"\"\" routes =", "bool: True if valid, else False. \"\"\" if route in", "up: Put Flask app in test mode.\"\"\" app = create_app()", "\"\"\"Test routes.\"\"\" ignore_routes = ('/static/<path:filename>',) ignore_end_patterns = ('>',) def setUp(self):", "route (str): Route url pattern. Returns: bool: True if valid,", "import create_app class TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\" ignore_routes = ('/static/<path:filename>',) ignore_end_patterns", "unittest from copy import copy from webui.app import create_app class", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\"Unit tests.\"\"\" import", "test mode.\"\"\" app = create_app() self.initial_app = copy(app) app.testing =", "self.app.get(route) if __name__ == '__main__': from test.utils.doctest_unittest_runner import doctest_unittest_runner TEST_DIR", "no runtime errors..\"\"\" routes = [route.rule for route in self.initial_app.url_map.iter_rules()", "import copy from webui.app import create_app class TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\"", "copy import copy from webui.app import create_app class TestRoutes(unittest.TestCase): \"\"\"Test", "Returns: bool: True if valid, else False. \"\"\" if route", "copy(app) app.testing = True self.app = app.test_client() @staticmethod def valid_route(route):", "True self.app = app.test_client() @staticmethod def valid_route(route): \"\"\"Validate route. 
Args:", "test.utils.doctest_unittest_runner import doctest_unittest_runner TEST_DIR = os.path.dirname(os.path.realpath(__file__)) + '/' doctest_unittest_runner(test_dir=TEST_DIR, relative_path_to_root='../',", "ignore_routes = ('/static/<path:filename>',) ignore_end_patterns = ('>',) def setUp(self): \"\"\"Set up:", "for route in routes: self.app.get(route) if __name__ == '__main__': from", "# -*- coding: utf-8 -*- \"\"\"Unit tests.\"\"\" import os import", "TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\" ignore_routes = ('/static/<path:filename>',) ignore_end_patterns = ('>',) def", "from test.utils.doctest_unittest_runner import doctest_unittest_runner TEST_DIR = os.path.dirname(os.path.realpath(__file__)) + '/' doctest_unittest_runner(test_dir=TEST_DIR,", "test_routes(self): \"\"\"Smoke test routes to ensure no runtime errors..\"\"\" routes", "in TestRoutes.ignore_routes \\ or route.endswith(TestRoutes.ignore_end_patterns): return False return True def", "False. \"\"\" if route in TestRoutes.ignore_routes \\ or route.endswith(TestRoutes.ignore_end_patterns): return", "def test_routes(self): \"\"\"Smoke test routes to ensure no runtime errors..\"\"\"", "('>',) def setUp(self): \"\"\"Set up: Put Flask app in test", "url pattern. Returns: bool: True if valid, else False. \"\"\"", "Put Flask app in test mode.\"\"\" app = create_app() self.initial_app", "= ('>',) def setUp(self): \"\"\"Set up: Put Flask app in", "Route url pattern. 
Returns: bool: True if valid, else False.", "True def test_routes(self): \"\"\"Smoke test routes to ensure no runtime", "tests.\"\"\" import os import unittest from copy import copy from", "if route in TestRoutes.ignore_routes \\ or route.endswith(TestRoutes.ignore_end_patterns): return False return", "route in TestRoutes.ignore_routes \\ or route.endswith(TestRoutes.ignore_end_patterns): return False return True", "route.endswith(TestRoutes.ignore_end_patterns): return False return True def test_routes(self): \"\"\"Smoke test routes", "self.initial_app = copy(app) app.testing = True self.app = app.test_client() @staticmethod", "route in self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)] for route in routes: self.app.get(route)", "valid_route(route): \"\"\"Validate route. Args: route (str): Route url pattern. Returns:", "TestRoutes.ignore_routes \\ or route.endswith(TestRoutes.ignore_end_patterns): return False return True def test_routes(self):", "routes to ensure no runtime errors..\"\"\" routes = [route.rule for", "to ensure no runtime errors..\"\"\" routes = [route.rule for route", "python3 # -*- coding: utf-8 -*- \"\"\"Unit tests.\"\"\" import os", "test routes to ensure no runtime errors..\"\"\" routes = [route.rule", "import unittest from copy import copy from webui.app import create_app", "(str): Route url pattern. 
Returns: bool: True if valid, else", "errors..\"\"\" routes = [route.rule for route in self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)]", "import doctest_unittest_runner TEST_DIR = os.path.dirname(os.path.realpath(__file__)) + '/' doctest_unittest_runner(test_dir=TEST_DIR, relative_path_to_root='../', package_names=['webui',", "in self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)] for route in routes: self.app.get(route) if", "== '__main__': from test.utils.doctest_unittest_runner import doctest_unittest_runner TEST_DIR = os.path.dirname(os.path.realpath(__file__)) +", "if __name__ == '__main__': from test.utils.doctest_unittest_runner import doctest_unittest_runner TEST_DIR =", "def setUp(self): \"\"\"Set up: Put Flask app in test mode.\"\"\"", "os import unittest from copy import copy from webui.app import", "True if valid, else False. \"\"\" if route in TestRoutes.ignore_routes", "create_app class TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\" ignore_routes = ('/static/<path:filename>',) ignore_end_patterns =", "or route.endswith(TestRoutes.ignore_end_patterns): return False return True def test_routes(self): \"\"\"Smoke test", "ignore_end_patterns = ('>',) def setUp(self): \"\"\"Set up: Put Flask app", "self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)] for route in routes: self.app.get(route) if __name__", "-*- \"\"\"Unit tests.\"\"\" import os import unittest from copy import", "app.testing = True self.app = app.test_client() @staticmethod def valid_route(route): \"\"\"Validate", "self.app = app.test_client() @staticmethod def valid_route(route): \"\"\"Validate route. Args: route", "route. Args: route (str): Route url pattern. Returns: bool: True", "\"\"\"Set up: Put Flask app in test mode.\"\"\" app =", "from copy import copy from webui.app import create_app class TestRoutes(unittest.TestCase):", "valid, else False. 
\"\"\" if route in TestRoutes.ignore_routes \\ or", "\"\"\"Validate route. Args: route (str): Route url pattern. Returns: bool:", "\"\"\"Unit tests.\"\"\" import os import unittest from copy import copy", "('/static/<path:filename>',) ignore_end_patterns = ('>',) def setUp(self): \"\"\"Set up: Put Flask", "from webui.app import create_app class TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\" ignore_routes =", "route in routes: self.app.get(route) if __name__ == '__main__': from test.utils.doctest_unittest_runner", "Args: route (str): Route url pattern. Returns: bool: True if", "= [route.rule for route in self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)] for route", "routes: self.app.get(route) if __name__ == '__main__': from test.utils.doctest_unittest_runner import doctest_unittest_runner", "create_app() self.initial_app = copy(app) app.testing = True self.app = app.test_client()", "= ('/static/<path:filename>',) ignore_end_patterns = ('>',) def setUp(self): \"\"\"Set up: Put", "[route.rule for route in self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)] for route in", "return True def test_routes(self): \"\"\"Smoke test routes to ensure no", "in test mode.\"\"\" app = create_app() self.initial_app = copy(app) app.testing", "= app.test_client() @staticmethod def valid_route(route): \"\"\"Validate route. Args: route (str):", "if valid, else False. \"\"\" if route in TestRoutes.ignore_routes \\", "__name__ == '__main__': from test.utils.doctest_unittest_runner import doctest_unittest_runner TEST_DIR = os.path.dirname(os.path.realpath(__file__))", "= True self.app = app.test_client() @staticmethod def valid_route(route): \"\"\"Validate route.", "in routes: self.app.get(route) if __name__ == '__main__': from test.utils.doctest_unittest_runner import", "app.test_client() @staticmethod def valid_route(route): \"\"\"Validate route. Args: route (str): Route", "else False. 
\"\"\" if route in TestRoutes.ignore_routes \\ or route.endswith(TestRoutes.ignore_end_patterns):", "= copy(app) app.testing = True self.app = app.test_client() @staticmethod def", "setUp(self): \"\"\"Set up: Put Flask app in test mode.\"\"\" app", "copy from webui.app import create_app class TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\" ignore_routes", "app = create_app() self.initial_app = copy(app) app.testing = True self.app", "'__main__': from test.utils.doctest_unittest_runner import doctest_unittest_runner TEST_DIR = os.path.dirname(os.path.realpath(__file__)) + '/'", "\\ or route.endswith(TestRoutes.ignore_end_patterns): return False return True def test_routes(self): \"\"\"Smoke", "return False return True def test_routes(self): \"\"\"Smoke test routes to", "= create_app() self.initial_app = copy(app) app.testing = True self.app =", "\"\"\" if route in TestRoutes.ignore_routes \\ or route.endswith(TestRoutes.ignore_end_patterns): return False", "routes.\"\"\" ignore_routes = ('/static/<path:filename>',) ignore_end_patterns = ('>',) def setUp(self): \"\"\"Set", "webui.app import create_app class TestRoutes(unittest.TestCase): \"\"\"Test routes.\"\"\" ignore_routes = ('/static/<path:filename>',)", "app in test mode.\"\"\" app = create_app() self.initial_app = copy(app)", "import os import unittest from copy import copy from webui.app", "coding: utf-8 -*- \"\"\"Unit tests.\"\"\" import os import unittest from", "mode.\"\"\" app = create_app() self.initial_app = copy(app) app.testing = True", "@staticmethod def valid_route(route): \"\"\"Validate route. 
Args: route (str): Route url", "utf-8 -*- \"\"\"Unit tests.\"\"\" import os import unittest from copy", "doctest_unittest_runner TEST_DIR = os.path.dirname(os.path.realpath(__file__)) + '/' doctest_unittest_runner(test_dir=TEST_DIR, relative_path_to_root='../', package_names=['webui', 'test'])", "runtime errors..\"\"\" routes = [route.rule for route in self.initial_app.url_map.iter_rules() if", "routes = [route.rule for route in self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)] for", "def valid_route(route): \"\"\"Validate route. Args: route (str): Route url pattern.", "self.valid_route(route.rule)] for route in routes: self.app.get(route) if __name__ == '__main__':", "Flask app in test mode.\"\"\" app = create_app() self.initial_app =", "if self.valid_route(route.rule)] for route in routes: self.app.get(route) if __name__ ==", "-*- coding: utf-8 -*- \"\"\"Unit tests.\"\"\" import os import unittest", "ensure no runtime errors..\"\"\" routes = [route.rule for route in", "for route in self.initial_app.url_map.iter_rules() if self.valid_route(route.rule)] for route in routes:", "False return True def test_routes(self): \"\"\"Smoke test routes to ensure", "pattern. Returns: bool: True if valid, else False. \"\"\" if" ]
[ "currentDirectories = line.split(os.sep) if directory in currentDirectories: found = True", "format on 'f' file.\"\"\" return subprocess.check_output(['clang-format', '-style=file', f]) def test_clang_format_is_correctly_installed(self):", "'include/controller', 'projects', 'resources/projects', 'resources/wren/shaders', 'tests', 'include/wren', 'src/controller/c', 'src/controller/cpp', 'src/license/sign', 'src/webots',", "clangFormatConfigFile = self.WEBOTS_HOME + os.sep + '.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile", "True break if not skipFile: sources.append(path) curdir = os.getcwd() os.chdir(self.WEBOTS_HOME)", "2.0 (the \"License\"); # you may not use this file", "shouldContinue = False for path in skippedPaths: if rootPath.startswith(self.WEBOTS_HOME +", "path in skippedPaths: if rootPath.startswith(self.WEBOTS_HOME + os.sep + path.replace('/', os.sep)):", "subprocess.check_output(['clang-format', '-style=file', f]) def test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat is correctly installed.\"\"\"", "found: continue for directory in skippedDirectories: currentDirectories = line.split(os.sep) if", "open(source, encoding='utf8') as file: try: for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()):", "with open(source, encoding='utf8') as file: try: for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(),", "msg=clangFormatConfigFile + ' not found.' 
) def test_sources_are_clang_format_compliant(self): \"\"\"Test that", "for line in file: line = line.strip() extension = os.path.splitext(line)[1][1:].lower()", "\"\"\"Set up called before each test.\"\"\" self.WEBOTS_HOME = os.environ['WEBOTS_HOME'] def", "if line.startswith(directory): found = True break if not found: continue", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "python # Copyright 1996-2021 Cyberbotics Ltd. # # Licensed under", "up called before each test.\"\"\" self.WEBOTS_HOME = os.environ['WEBOTS_HOME'] def _runClangFormat(self,", "rootPath, dirNames, fileNames in os.walk(path): shouldContinue = False for path", "\"\"\"Test that sources are ClangFormat compliant.\"\"\" directories = [ 'include/controller',", "os.path.splitext(line)[1][1:].lower() if extension not in extensions: continue found = False", "code is compliant with ClangFormat.\"\"\" import unittest import difflib import", "shouldContinue: continue for fileName in fileNames: extension = os.path.splitext(fileName)[1][1:].lower() if", "break if shouldContinue: continue for fileName in fileNames: extension =", "fileName in fileNames: extension = os.path.splitext(fileName)[1][1:].lower() if extension not in", "'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ] skippedFiles = [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ]", "sources.append(path) curdir = os.getcwd() os.chdir(self.WEBOTS_HOME) for source in sources: diff", "+ os.sep + directory.replace('/', os.sep) for rootPath, dirNames, fileNames in", "\"\"\"Test ClangFormat is correctly installed.\"\"\" self.assertTrue( find_executable('clang-format') is not None,", "ClangFormat.\"\"\" import unittest import difflib import os import subprocess from", "] extensions = ['c', 'h', 'cpp', 'hpp', 'cc', 
'hh', 'c++',", "continue sources.append(line.replace('/', os.sep)) else: for directory in directories: path =", "directories = [ 'include/controller', 'projects', 'resources/projects', 'resources/wren/shaders', 'tests', 'include/wren', 'src/controller/c',", "Cyberbotics Ltd. # # Licensed under the Apache License, Version", "file: for line in file: line = line.strip() extension =", "use this file except in compliance with the License. #", "False for directory in directories: if line.startswith(directory): found = True", "problem in %s' % source) self.assertTrue( len(diff) == 0, msg='Source", "os.sep + '.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile + ' not found.'", "if os.path.isfile(modified_files): with open(modified_files, 'r') as file: for line in", "] skippedFiles = [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories = [ 'build',", "for file in skippedFiles: if os.path.normpath((self.WEBOTS_HOME + os.sep + file.replace('/',", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "License. 
# You may obtain a copy of the License", "import unittest import difflib import os import subprocess from io", "os.sep))) == path: skipFile = True break if not skipFile:", "line in file: line = line.strip() extension = os.path.splitext(line)[1][1:].lower() if", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "distutils.spawn import find_executable class TestClangFormat(unittest.TestCase): \"\"\"Unit test for ClangFormat compliance.\"\"\"", "'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external'", "not compliant with ClangFormat:\\n\\nDIFF:%s' % (source, diff) ) os.chdir(curdir) if", "permissions and # limitations under the License. \"\"\"Test that the", "= False for directory in skippedPaths + skippedFiles: if line.startswith(directory):", "for directory in skippedPaths + skippedFiles: if line.startswith(directory): found =", "if rootPath.startswith(self.WEBOTS_HOME + os.sep + path.replace('/', os.sep)): shouldContinue = True", "% source) self.assertTrue( len(diff) == 0, msg='Source file \"%s\" is", "+ '.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile + ' not found.' )", "skipFile = False for file in skippedFiles: if os.path.normpath((self.WEBOTS_HOME +", "in compliance with the License. 
# You may obtain a", "software # distributed under the License is distributed on an", "'frag'] modified_files = os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt') sources = []", "os.walk(path): shouldContinue = False for path in skippedPaths: if rootPath.startswith(self.WEBOTS_HOME", "clang format on 'f' file.\"\"\" return subprocess.check_output(['clang-format', '-style=file', f]) def", "'r') as file: for line in file: line = line.strip()", "found = False for directory in skippedPaths + skippedFiles: if", "is not compliant with ClangFormat:\\n\\nDIFF:%s' % (source, diff) ) os.chdir(curdir)", "in currentDirectories: shouldContinue = True break if shouldContinue: continue for", "installed on this computer.' ) clangFormatConfigFile = self.WEBOTS_HOME + os.sep", "'sources', 'modified_files.txt') sources = [] if os.path.isfile(modified_files): with open(modified_files, 'r')", "difflib import os import subprocess from io import open from", "continue found = False for directory in directories: if line.startswith(directory):", "except UnicodeDecodeError: self.assertTrue(False, msg='utf-8 decode problem in %s' % source)", "if shouldContinue: continue for fileName in fileNames: extension = os.path.splitext(fileName)[1][1:].lower()", "directory in skippedDirectories: currentDirectories = line.split(os.sep) if directory in currentDirectories:", "shader source code is compliant with ClangFormat.\"\"\" import unittest import", "_runClangFormat(self, f): \"\"\"Run clang format on 'f' file.\"\"\" return subprocess.check_output(['clang-format',", "0, msg='Source file \"%s\" is not compliant with ClangFormat:\\n\\nDIFF:%s' %", "if not found: continue found = False for directory in", "= [] if os.path.isfile(modified_files): with open(modified_files, 'r') as file: for", "test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat is correctly installed.\"\"\" self.assertTrue( find_executable('clang-format') is not", "line = line.strip() extension = 
os.path.splitext(line)[1][1:].lower() if extension not in", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "f): \"\"\"Run clang format on 'f' file.\"\"\" return subprocess.check_output(['clang-format', '-style=file',", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "directory.replace('/', os.sep) for rootPath, dirNames, fileNames in os.walk(path): shouldContinue =", "extension = os.path.splitext(fileName)[1][1:].lower() if extension not in extensions: continue path", "to in writing, software # distributed under the License is", "currentDirectories: found = True if found: continue sources.append(line.replace('/', os.sep)) else:", "try: for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff += line +", "# See the License for the specific language governing permissions", "= [ 'build', 'python', 'java' ] extensions = ['c', 'h',", "limitations under the License. 
\"\"\"Test that the C, C++ and", "or agreed to in writing, software # distributed under the", "os.path.isfile(modified_files): with open(modified_files, 'r') as file: for line in file:", "required by applicable law or agreed to in writing, software", "'projects', 'resources/projects', 'resources/wren/shaders', 'tests', 'include/wren', 'src/controller/c', 'src/controller/cpp', 'src/license/sign', 'src/webots', 'src/wren'", "found = True break if not found: continue found =", "compliant.\"\"\" directories = [ 'include/controller', 'projects', 'resources/projects', 'resources/wren/shaders', 'tests', 'include/wren',", "in skippedPaths + skippedFiles: if line.startswith(directory): found = True break", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. # You may obtain a copy of", "decode problem in %s' % source) self.assertTrue( len(diff) == 0,", "extensions: continue found = False for directory in directories: if", "subprocess from io import open from distutils.spawn import find_executable class", "line.strip() extension = os.path.splitext(line)[1][1:].lower() if extension not in extensions: continue", "= True break if not found: continue found = False", "[ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo',", "= self.WEBOTS_HOME + os.sep + '.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile +", "compliance with the License. 
# You may obtain a copy", "def _runClangFormat(self, f): \"\"\"Run clang format on 'f' file.\"\"\" return", "agreed to in writing, software # distributed under the License", "os.sep + directory.replace('/', os.sep) for rootPath, dirNames, fileNames in os.walk(path):", "+ os.sep + file.replace('/', os.sep))) == path: skipFile = True", "distributed under the License is distributed on an \"AS IS\"", "def test_sources_are_clang_format_compliant(self): \"\"\"Test that sources are ClangFormat compliant.\"\"\" directories =", "in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff += line + '\\n' except UnicodeDecodeError:", "'src/license/sign', 'src/webots', 'src/wren' ] skippedPaths = [ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba',", "'src/wren' ] skippedPaths = [ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 'projects/robots/mobsya/thymio/libraries/dashel-src',", "% (source, diff) ) os.chdir(curdir) if __name__ == '__main__': unittest.main()", "not skipFile: sources.append(path) curdir = os.getcwd() os.chdir(self.WEBOTS_HOME) for source in", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "file: try: for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff += line", "rootPath.startswith(self.WEBOTS_HOME + os.sep + path.replace('/', os.sep)): shouldContinue = True break", "in file: line = line.strip() extension = os.path.splitext(line)[1][1:].lower() if extension", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include',", "not use this file except in compliance with the License.", "+ path.replace('/', os.sep)): shouldContinue = True break for directory in", "import find_executable class TestClangFormat(unittest.TestCase): \"\"\"Unit test for ClangFormat compliance.\"\"\" def", "not None, msg='ClangFormat is not installed on this computer.' )", "writing, software # distributed under the License is distributed on", "currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if directory in currentDirectories: shouldContinue =", "for fileName in fileNames: extension = os.path.splitext(fileName)[1][1:].lower() if extension not", "Copyright 1996-2021 Cyberbotics Ltd. 
# # Licensed under the Apache", "UnicodeDecodeError: self.assertTrue(False, msg='utf-8 decode problem in %s' % source) self.assertTrue(", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "compliance.\"\"\" def setUp(self): \"\"\"Set up called before each test.\"\"\" self.WEBOTS_HOME", "skippedDirectories = [ 'build', 'python', 'java' ] extensions = ['c',", "with ClangFormat.\"\"\" import unittest import difflib import os import subprocess", "'h', 'cpp', 'hpp', 'cc', 'hh', 'c++', 'h++', 'vert', 'frag'] modified_files", "if extension not in extensions: continue path = os.path.normpath(os.path.join(rootPath, fileName))", "CONDITIONS OF ANY KIND, either express or implied. # See", "found: continue sources.append(line.replace('/', os.sep)) else: for directory in directories: path", "test_sources_are_clang_format_compliant(self): \"\"\"Test that sources are ClangFormat compliant.\"\"\" directories = [", "os.sep)) else: for directory in directories: path = self.WEBOTS_HOME +", "False for path in skippedPaths: if rootPath.startswith(self.WEBOTS_HOME + os.sep +", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "class TestClangFormat(unittest.TestCase): \"\"\"Unit test for ClangFormat compliance.\"\"\" def setUp(self): \"\"\"Set", "= [ 'include/controller', 'projects', 'resources/projects', 'resources/wren/shaders', 'tests', 'include/wren', 'src/controller/c', 'src/controller/cpp',", "test for ClangFormat compliance.\"\"\" def setUp(self): \"\"\"Set up called before", "os.getcwd() os.chdir(self.WEBOTS_HOME) for source in sources: diff = '' with", "is not installed on this computer.' ) clangFormatConfigFile = self.WEBOTS_HOME", "skippedFiles: if line.startswith(directory): found = True break if found: continue", "not found: continue found = False for directory in skippedPaths", "self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile + ' not found.' 
) def test_sources_are_clang_format_compliant(self):", "in extensions: continue path = os.path.normpath(os.path.join(rootPath, fileName)) skipFile = False", "in fileNames: extension = os.path.splitext(fileName)[1][1:].lower() if extension not in extensions:", "= True if found: continue sources.append(line.replace('/', os.sep)) else: for directory", "io import open from distutils.spawn import find_executable class TestClangFormat(unittest.TestCase): \"\"\"Unit", "def setUp(self): \"\"\"Set up called before each test.\"\"\" self.WEBOTS_HOME =", "os.chdir(self.WEBOTS_HOME) for source in sources: diff = '' with open(source,", "= os.getcwd() os.chdir(self.WEBOTS_HOME) for source in sources: diff = ''", "OR CONDITIONS OF ANY KIND, either express or implied. #", "'').split(os.sep) if directory in currentDirectories: shouldContinue = True break if", "directory in currentDirectories: shouldContinue = True break if shouldContinue: continue", "C++ and shader source code is compliant with ClangFormat.\"\"\" import", "the License is distributed on an \"AS IS\" BASIS, #", "'java' ] extensions = ['c', 'h', 'cpp', 'hpp', 'cc', 'hh',", "path.replace('/', os.sep)): shouldContinue = True break for directory in skippedDirectories:", "encoding='utf8') as file: try: for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff", "'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories = [ 'build', 'python', 'java' ] extensions", "'h++', 'vert', 'frag'] modified_files = os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt') sources", "open from distutils.spawn import find_executable class TestClangFormat(unittest.TestCase): \"\"\"Unit test for", "= [ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 
'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis',", "[] if os.path.isfile(modified_files): with open(modified_files, 'r') as file: for line", "fileName)) skipFile = False for file in skippedFiles: if os.path.normpath((self.WEBOTS_HOME", "and shader source code is compliant with ClangFormat.\"\"\" import unittest", "skipFile = True break if not skipFile: sources.append(path) curdir =", "directory in directories: if line.startswith(directory): found = True break if", "extensions = ['c', 'h', 'cpp', 'hpp', 'cc', 'hh', 'c++', 'h++',", "file.read().splitlines()): diff += line + '\\n' except UnicodeDecodeError: self.assertTrue(False, msg='utf-8", "file in skippedFiles: if os.path.normpath((self.WEBOTS_HOME + os.sep + file.replace('/', os.sep)))", "os.sep) for rootPath, dirNames, fileNames in os.walk(path): shouldContinue = False", "1996-2021 Cyberbotics Ltd. # # Licensed under the Apache License,", "modified_files = os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt') sources = [] if", "governing permissions and # limitations under the License. 
\"\"\"Test that", "extension = os.path.splitext(line)[1][1:].lower() if extension not in extensions: continue found", "as file: try: for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff +=", "fileNames in os.walk(path): shouldContinue = False for path in skippedPaths:", "os.environ['WEBOTS_HOME'] def _runClangFormat(self, f): \"\"\"Run clang format on 'f' file.\"\"\"", "skippedPaths + skippedFiles: if line.startswith(directory): found = True break if", "with ClangFormat:\\n\\nDIFF:%s' % (source, diff) ) os.chdir(curdir) if __name__ ==", "law or agreed to in writing, software # distributed under", "each test.\"\"\" self.WEBOTS_HOME = os.environ['WEBOTS_HOME'] def _runClangFormat(self, f): \"\"\"Run clang", "not installed on this computer.' ) clangFormatConfigFile = self.WEBOTS_HOME +", "'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ] skippedFiles = [", "not in extensions: continue path = os.path.normpath(os.path.join(rootPath, fileName)) skipFile =", "'resources/wren/shaders', 'tests', 'include/wren', 'src/controller/c', 'src/controller/cpp', 'src/license/sign', 'src/webots', 'src/wren' ] skippedPaths", "== path: skipFile = True break if not skipFile: sources.append(path)", "= '' with open(source, encoding='utf8') as file: try: for line", "msg='utf-8 decode problem in %s' % source) self.assertTrue( len(diff) ==", "\"%s\" is not compliant with ClangFormat:\\n\\nDIFF:%s' % (source, diff) )", "C, C++ and shader source code is compliant with ClangFormat.\"\"\"", "test.\"\"\" self.WEBOTS_HOME = os.environ['WEBOTS_HOME'] def _runClangFormat(self, f): \"\"\"Run clang format", "self.assertTrue( len(diff) == 0, msg='Source file \"%s\" is 
not compliant", "True break if shouldContinue: continue for fileName in fileNames: extension", "'tests', 'sources', 'modified_files.txt') sources = [] if os.path.isfile(modified_files): with open(modified_files,", "line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff += line + '\\n' except", "= self.WEBOTS_HOME + os.sep + directory.replace('/', os.sep) for rootPath, dirNames,", "fileNames: extension = os.path.splitext(fileName)[1][1:].lower() if extension not in extensions: continue", "ClangFormat compliant.\"\"\" directories = [ 'include/controller', 'projects', 'resources/projects', 'resources/wren/shaders', 'tests',", "extension not in extensions: continue path = os.path.normpath(os.path.join(rootPath, fileName)) skipFile", "if found: continue for directory in skippedDirectories: currentDirectories = line.split(os.sep)", "continue path = os.path.normpath(os.path.join(rootPath, fileName)) skipFile = False for file", "+ skippedFiles: if line.startswith(directory): found = True break if found:", "['c', 'h', 'cpp', 'hpp', 'cc', 'hh', 'c++', 'h++', 'vert', 'frag']", "may obtain a copy of the License at # #", "line.startswith(directory): found = True break if found: continue for directory", "skippedDirectories: currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if directory in currentDirectories: shouldContinue", "= os.path.normpath(os.path.join(rootPath, fileName)) skipFile = False for file in skippedFiles:", "= os.environ['WEBOTS_HOME'] def _runClangFormat(self, f): \"\"\"Run clang format on 'f'", "+ os.sep + '.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile + ' not", "skippedPaths = [ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 
'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip',", "+ os.sep + path.replace('/', os.sep)): shouldContinue = True break for", ") clangFormatConfigFile = self.WEBOTS_HOME + os.sep + '.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile),", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "line.startswith(directory): found = True break if not found: continue found", "is not None, msg='ClangFormat is not installed on this computer.'", "'src/controller/c', 'src/controller/cpp', 'src/license/sign', 'src/webots', 'src/wren' ] skippedPaths = [ 'projects/default/controllers/ros/include',", "may not use this file except in compliance with the", "= True break if found: continue for directory in skippedDirectories:", "skippedFiles = [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories = [ 'build', 'python',", "found = True break if found: continue for directory in", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "os.sep + file.replace('/', os.sep))) == path: skipFile = True break", "this file except in compliance with the License. # You", "= os.path.splitext(line)[1][1:].lower() if extension not in extensions: continue found =", "= line.strip() extension = os.path.splitext(line)[1][1:].lower() if extension not in extensions:", "found = False for directory in directories: if line.startswith(directory): found", "diff += line + '\\n' except UnicodeDecodeError: self.assertTrue(False, msg='utf-8 decode", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "sources = [] if os.path.isfile(modified_files): with open(modified_files, 'r') as file:", "line.split(os.sep) if directory in currentDirectories: found = True if found:", "for source in sources: diff = '' with open(source, encoding='utf8')", "called before each test.\"\"\" self.WEBOTS_HOME = os.environ['WEBOTS_HOME'] def _runClangFormat(self, f):", "import difflib import os import subprocess from io import open", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "sources are ClangFormat compliant.\"\"\" directories = [ 'include/controller', 'projects', 'resources/projects',", "in extensions: continue found = False for directory in directories:", "in directories: path = self.WEBOTS_HOME + os.sep + directory.replace('/', os.sep)", "os import subprocess from io import open from distutils.spawn import", "os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile + ' not found.' ) def test_sources_are_clang_format_compliant(self): \"\"\"Test", "True if found: continue sources.append(line.replace('/', os.sep)) else: for directory in", "is compliant with ClangFormat.\"\"\" import unittest import difflib import os", "skippedPaths: if rootPath.startswith(self.WEBOTS_HOME + os.sep + path.replace('/', os.sep)): shouldContinue =", "def test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat is correctly installed.\"\"\" self.assertTrue( find_executable('clang-format') is", "True break if found: continue for directory in skippedDirectories: currentDirectories", "source in sources: diff = '' with open(source, encoding='utf8') as", "Ltd. 
# # Licensed under the Apache License, Version 2.0", "open(modified_files, 'r') as file: for line in file: line =", "= True break if not skipFile: sources.append(path) curdir = os.getcwd()", "'src/webots/external' ] skippedFiles = [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories = [", "ClangFormat is correctly installed.\"\"\" self.assertTrue( find_executable('clang-format') is not None, msg='ClangFormat", "'f' file.\"\"\" return subprocess.check_output(['clang-format', '-style=file', f]) def test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "'c++', 'h++', 'vert', 'frag'] modified_files = os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt')", "if directory in currentDirectories: shouldContinue = True break if shouldContinue:", "self.assertTrue(False, msg='utf-8 decode problem in %s' % source) self.assertTrue( len(diff)", "os.path.splitext(fileName)[1][1:].lower() if extension not in extensions: continue path = os.path.normpath(os.path.join(rootPath,", "that sources are ClangFormat compliant.\"\"\" directories = [ 'include/controller', 'projects',", "sources.append(line.replace('/', os.sep)) else: for directory in directories: path = self.WEBOTS_HOME", "False for directory in skippedPaths + skippedFiles: if line.startswith(directory): found", "or implied. # See the License for the specific language", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "= False for file in skippedFiles: if os.path.normpath((self.WEBOTS_HOME + os.sep", "source) self.assertTrue( len(diff) == 0, msg='Source file \"%s\" is not", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "os.path.normpath((self.WEBOTS_HOME + os.sep + file.replace('/', os.sep))) == path: skipFile =", "extension not in extensions: continue found = False for directory", "'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ] skippedFiles = [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h'", "break if not skipFile: sources.append(path) curdir = os.getcwd() os.chdir(self.WEBOTS_HOME) for", "'include/wren', 'src/controller/c', 'src/controller/cpp', 'src/license/sign', 'src/webots', 'src/wren' ] skippedPaths = [", "# Copyright 1996-2021 Cyberbotics Ltd. # # Licensed under the", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "under the License. 
\"\"\"Test that the C, C++ and shader", "directories: path = self.WEBOTS_HOME + os.sep + directory.replace('/', os.sep) for", "correctly installed.\"\"\" self.assertTrue( find_executable('clang-format') is not None, msg='ClangFormat is not", "import open from distutils.spawn import find_executable class TestClangFormat(unittest.TestCase): \"\"\"Unit test", "'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ] skippedFiles = [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories =", "'vert', 'frag'] modified_files = os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt') sources =", "'.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile + ' not found.' ) def", "None, msg='ClangFormat is not installed on this computer.' ) clangFormatConfigFile", "# # Unless required by applicable law or agreed to", "\"\"\"Test that the C, C++ and shader source code is", "msg='Source file \"%s\" is not compliant with ClangFormat:\\n\\nDIFF:%s' % (source,", "currentDirectories: shouldContinue = True break if shouldContinue: continue for fileName", "if not skipFile: sources.append(path) curdir = os.getcwd() os.chdir(self.WEBOTS_HOME) for source", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "path = self.WEBOTS_HOME + os.sep + directory.replace('/', os.sep) for rootPath,", "Version 2.0 (the \"License\"); # you may not use this", "+= line + '\\n' except UnicodeDecodeError: self.assertTrue(False, msg='utf-8 decode problem", "'src/webots', 'src/wren' ] skippedPaths = [ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel',", "break for directory in skippedDirectories: currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if", "'tests', 
'include/wren', 'src/controller/c', 'src/controller/cpp', 'src/license/sign', 'src/webots', 'src/wren' ] skippedPaths =", "= rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if directory in currentDirectories: shouldContinue = True", "directory in skippedDirectories: currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if directory in", "sources: diff = '' with open(source, encoding='utf8') as file: try:", "implied. # See the License for the specific language governing", "compliant with ClangFormat.\"\"\" import unittest import difflib import os import", "= os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt') sources = [] if os.path.isfile(modified_files):", "file \"%s\" is not compliant with ClangFormat:\\n\\nDIFF:%s' % (source, diff)", "under the Apache License, Version 2.0 (the \"License\"); # you", "line + '\\n' except UnicodeDecodeError: self.assertTrue(False, msg='utf-8 decode problem in", "in directories: if line.startswith(directory): found = True break if not", "= os.path.splitext(fileName)[1][1:].lower() if extension not in extensions: continue path =", "find_executable('clang-format') is not None, msg='ClangFormat is not installed on this", "import subprocess from io import open from distutils.spawn import find_executable", "in skippedDirectories: currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if directory in currentDirectories:", "by applicable law or agreed to in writing, software #", "+ ' not found.' 
) def test_sources_are_clang_format_compliant(self): \"\"\"Test that sources", "'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ] skippedFiles = [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories", "in sources: diff = '' with open(source, encoding='utf8') as file:", "\"\"\"Run clang format on 'f' file.\"\"\" return subprocess.check_output(['clang-format', '-style=file', f])", "[ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories = [ 'build', 'python', 'java' ]", "'\\n' except UnicodeDecodeError: self.assertTrue(False, msg='utf-8 decode problem in %s' %", "'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ]", "import os import subprocess from io import open from distutils.spawn", "break if found: continue for directory in skippedDirectories: currentDirectories =", "return subprocess.check_output(['clang-format', '-style=file', f]) def test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat is correctly", "is correctly installed.\"\"\" self.assertTrue( find_executable('clang-format') is not None, msg='ClangFormat is", "'cc', 'hh', 'c++', 'h++', 'vert', 'frag'] modified_files = os.path.join(self.WEBOTS_HOME, 'tests',", "in os.walk(path): shouldContinue = False for path in skippedPaths: if", "language governing permissions and # limitations under the License. 
\"\"\"Test", "file.replace('/', os.sep))) == path: skipFile = True break if not", "shouldContinue = True break for directory in skippedDirectories: currentDirectories =", "os.sep)): shouldContinue = True break for directory in skippedDirectories: currentDirectories", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "directory in directories: path = self.WEBOTS_HOME + os.sep + directory.replace('/',", "if os.path.normpath((self.WEBOTS_HOME + os.sep + file.replace('/', os.sep))) == path: skipFile", "Unless required by applicable law or agreed to in writing,", "path = os.path.normpath(os.path.join(rootPath, fileName)) skipFile = False for file in", "for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff += line + '\\n'", "difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(), file.read().splitlines()): diff += line + '\\n' except UnicodeDecodeError: self.assertTrue(False,", "extensions: continue path = os.path.normpath(os.path.join(rootPath, fileName)) skipFile = False for", "= ['c', 'h', 'cpp', 'hpp', 'cc', 'hh', 'c++', 'h++', 'vert',", "in skippedPaths: if rootPath.startswith(self.WEBOTS_HOME + os.sep + path.replace('/', os.sep)): shouldContinue", "the specific language governing permissions and # limitations under the", "on 'f' file.\"\"\" return subprocess.check_output(['clang-format', '-style=file', f]) def test_clang_format_is_correctly_installed(self): \"\"\"Test", "'hh', 'c++', 'h++', 'vert', 'frag'] modified_files = os.path.join(self.WEBOTS_HOME, 'tests', 'sources',", "= False for path in skippedPaths: if rootPath.startswith(self.WEBOTS_HOME + os.sep", "applicable law or agreed to in writing, software # distributed", "skipFile: sources.append(path) curdir = os.getcwd() os.chdir(self.WEBOTS_HOME) for source in sources:", "directory in currentDirectories: found = True if found: continue sources.append(line.replace('/',", "on this computer.' 
) clangFormatConfigFile = self.WEBOTS_HOME + os.sep +", "= [ 'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h' ] skippedDirectories = [ 'build', 'python', 'java'", "curdir = os.getcwd() os.chdir(self.WEBOTS_HOME) for source in sources: diff =", "compliant with ClangFormat:\\n\\nDIFF:%s' % (source, diff) ) os.chdir(curdir) if __name__", "in writing, software # distributed under the License is distributed", "not found.' ) def test_sources_are_clang_format_compliant(self): \"\"\"Test that sources are ClangFormat", "if directory in currentDirectories: found = True if found: continue", "\"\"\"Unit test for ClangFormat compliance.\"\"\" def setUp(self): \"\"\"Set up called", "'hpp', 'cc', 'hh', 'c++', 'h++', 'vert', 'frag'] modified_files = os.path.join(self.WEBOTS_HOME,", "ClangFormat compliance.\"\"\" def setUp(self): \"\"\"Set up called before each test.\"\"\"", "self.WEBOTS_HOME + os.sep + directory.replace('/', os.sep) for rootPath, dirNames, fileNames", "+ '\\n' except UnicodeDecodeError: self.assertTrue(False, msg='utf-8 decode problem in %s'", "unittest import difflib import os import subprocess from io import", "file: line = line.strip() extension = os.path.splitext(line)[1][1:].lower() if extension not", "'python', 'java' ] extensions = ['c', 'h', 'cpp', 'hpp', 'cc',", "continue found = False for directory in skippedPaths + skippedFiles:", "as file: for line in file: line = line.strip() extension", "%s' % source) self.assertTrue( len(diff) == 0, msg='Source file \"%s\"", "'-style=file', f]) def test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat is correctly installed.\"\"\" self.assertTrue(", "skippedDirectories: currentDirectories = line.split(os.sep) if directory in currentDirectories: found =", "True break for directory in skippedDirectories: currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep)", "skippedFiles: if os.path.normpath((self.WEBOTS_HOME + os.sep + file.replace('/', 
os.sep))) == path:", "== 0, msg='Source file \"%s\" is not compliant with ClangFormat:\\n\\nDIFF:%s'", "found = True if found: continue sources.append(line.replace('/', os.sep)) else: for", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "if found: continue sources.append(line.replace('/', os.sep)) else: for directory in directories:", "diff = '' with open(source, encoding='utf8') as file: try: for", "] skippedPaths = [ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer', 'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba', 'projects/robots/mobsya/thymio/libraries/dashel', 'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh',", "License, Version 2.0 (the \"License\"); # you may not use", "in skippedDirectories: currentDirectories = line.split(os.sep) if directory in currentDirectories: found", "# You may obtain a copy of the License at", "found: continue found = False for directory in skippedPaths +", "[ 'build', 'python', 'java' ] extensions = ['c', 'h', 'cpp',", "#!/usr/bin/env python # Copyright 1996-2021 Cyberbotics Ltd. # # Licensed", "directories: if line.startswith(directory): found = True break if not found:", "file.\"\"\" return subprocess.check_output(['clang-format', '-style=file', f]) def test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat is", "for directory in skippedDirectories: currentDirectories = line.split(os.sep) if directory in", "that the C, C++ and shader source code is compliant", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "] skippedDirectories = [ 'build', 'python', 'java' ] extensions =", "shouldContinue = True break if shouldContinue: continue for fileName in", "this computer.' 
) clangFormatConfigFile = self.WEBOTS_HOME + os.sep + '.clang-format'", "else: for directory in directories: path = self.WEBOTS_HOME + os.sep", "from distutils.spawn import find_executable class TestClangFormat(unittest.TestCase): \"\"\"Unit test for ClangFormat", "+ directory.replace('/', os.sep) for rootPath, dirNames, fileNames in os.walk(path): shouldContinue", ") def test_sources_are_clang_format_compliant(self): \"\"\"Test that sources are ClangFormat compliant.\"\"\" directories", "from io import open from distutils.spawn import find_executable class TestClangFormat(unittest.TestCase):", "are ClangFormat compliant.\"\"\" directories = [ 'include/controller', 'projects', 'resources/projects', 'resources/wren/shaders',", "the License for the specific language governing permissions and #", "'' with open(source, encoding='utf8') as file: try: for line in", "Apache License, Version 2.0 (the \"License\"); # you may not", "# limitations under the License. \"\"\"Test that the C, C++", "[ 'include/controller', 'projects', 'resources/projects', 'resources/wren/shaders', 'tests', 'include/wren', 'src/controller/c', 'src/controller/cpp', 'src/license/sign',", "either express or implied. # See the License for the", "msg='ClangFormat is not installed on this computer.' ) clangFormatConfigFile =", "if extension not in extensions: continue found = False for", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "break if not found: continue found = False for directory", "' not found.' ) def test_sources_are_clang_format_compliant(self): \"\"\"Test that sources are", "if line.startswith(directory): found = True break if found: continue for", "found.' 
) def test_sources_are_clang_format_compliant(self): \"\"\"Test that sources are ClangFormat compliant.\"\"\"", "os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt') sources = [] if os.path.isfile(modified_files): with", "= False for directory in directories: if line.startswith(directory): found =", "in %s' % source) self.assertTrue( len(diff) == 0, msg='Source file", "for directory in directories: path = self.WEBOTS_HOME + os.sep +", "continue for directory in skippedDirectories: currentDirectories = line.split(os.sep) if directory", "= True break if shouldContinue: continue for fileName in fileNames:", "source code is compliant with ClangFormat.\"\"\" import unittest import difflib", "ClangFormat:\\n\\nDIFF:%s' % (source, diff) ) os.chdir(curdir) if __name__ == '__main__':", "before each test.\"\"\" self.WEBOTS_HOME = os.environ['WEBOTS_HOME'] def _runClangFormat(self, f): \"\"\"Run", "= line.split(os.sep) if directory in currentDirectories: found = True if", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= True break for directory in skippedDirectories: currentDirectories = rootPath.replace(self.WEBOTS_HOME,", "computer.' 
) clangFormatConfigFile = self.WEBOTS_HOME + os.sep + '.clang-format' self.assertTrue(", "f]) def test_clang_format_is_correctly_installed(self): \"\"\"Test ClangFormat is correctly installed.\"\"\" self.assertTrue( find_executable('clang-format')", "'modified_files.txt') sources = [] if os.path.isfile(modified_files): with open(modified_files, 'r') as", "self.assertTrue( find_executable('clang-format') is not None, msg='ClangFormat is not installed on", "find_executable class TestClangFormat(unittest.TestCase): \"\"\"Unit test for ClangFormat compliance.\"\"\" def setUp(self):", "os.sep + path.replace('/', os.sep)): shouldContinue = True break for directory", "'cpp', 'hpp', 'cc', 'hh', 'c++', 'h++', 'vert', 'frag'] modified_files =", "\"License\"); # you may not use this file except in", "for directory in skippedDirectories: currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if directory", "and # limitations under the License. \"\"\"Test that the C,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "'resources/projects', 'resources/wren/shaders', 'tests', 'include/wren', 'src/controller/c', 'src/controller/cpp', 'src/license/sign', 'src/webots', 'src/wren' ]", "os.path.normpath(os.path.join(rootPath, fileName)) skipFile = False for file in skippedFiles: if", "+ file.replace('/', os.sep))) == path: skipFile = True break if", "for path in skippedPaths: if rootPath.startswith(self.WEBOTS_HOME + os.sep + path.replace('/',", "self.WEBOTS_HOME = os.environ['WEBOTS_HOME'] def _runClangFormat(self, f): \"\"\"Run clang format on", "the C, C++ and shader source code is compliant with", "'projects/robots/mobsya/thymio/libraries/dashel', 'projects/robots/mobsya/thymio/libraries/dashel-src', 'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 
'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ] skippedFiles", "# distributed under the License is distributed on an \"AS", "False for file in skippedFiles: if os.path.normpath((self.WEBOTS_HOME + os.sep +", "'build', 'python', 'java' ] extensions = ['c', 'h', 'cpp', 'hpp',", "# Unless required by applicable law or agreed to in", "dirNames, fileNames in os.walk(path): shouldContinue = False for path in", "for directory in directories: if line.startswith(directory): found = True break", "the License. \"\"\"Test that the C, C++ and shader source", "True break if not found: continue found = False for", "TestClangFormat(unittest.TestCase): \"\"\"Unit test for ClangFormat compliance.\"\"\" def setUp(self): \"\"\"Set up", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "in skippedFiles: if os.path.normpath((self.WEBOTS_HOME + os.sep + file.replace('/', os.sep))) ==", "self.WEBOTS_HOME + os.sep + '.clang-format' self.assertTrue( os.path.exists(clangFormatConfigFile), msg=clangFormatConfigFile + '", "for ClangFormat compliance.\"\"\" def setUp(self): \"\"\"Set up called before each", "len(diff) == 0, msg='Source file \"%s\" is not compliant with", "You may obtain a copy of the License at #", "'src/controller/cpp', 'src/license/sign', 'src/webots', 'src/wren' ] skippedPaths = [ 'projects/default/controllers/ros/include', 'projects/robots/gctronic/e-puck/transfer',", "rootPath.replace(self.WEBOTS_HOME, '').split(os.sep) if directory in currentDirectories: shouldContinue = True break", "for rootPath, dirNames, fileNames in os.walk(path): shouldContinue = False for", "not in extensions: continue found = False for directory in", "path: skipFile = True break if not skipFile: sources.append(path) curdir", "with open(modified_files, 'r') as file: for line in file: line", "setUp(self): \"\"\"Set up called before each test.\"\"\" self.WEBOTS_HOME = os.environ['WEBOTS_HOME']", "'projects/robots/mobsya/thymio/libraries/dashel-src', 
'projects/robots/robotis/darwin-op/libraries/libssh', 'projects/robots/robotis/darwin-op/libraries/libzip', 'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis', 'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo', 'projects/vehicles/controllers/ros_automobile/include', 'src/webots/external' ] skippedFiles =", "the Apache License, Version 2.0 (the \"License\"); # you may", "License. \"\"\"Test that the C, C++ and shader source code", "in currentDirectories: found = True if found: continue sources.append(line.replace('/', os.sep))", "continue for fileName in fileNames: extension = os.path.splitext(fileName)[1][1:].lower() if extension", "directory in skippedPaths + skippedFiles: if line.startswith(directory): found = True", "installed.\"\"\" self.assertTrue( find_executable('clang-format') is not None, msg='ClangFormat is not installed" ]
[ "self.mock.log_error.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm", "posix platforms.') def test(self): self.assertEqual('/bin/ls', shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for", "script_name = 'javaclassfile.class' expected_command = 'java javaclassfile' self.call_and_assert_helper(expected_command, script_name) def", "for a file that needs one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self):", "get_non_interpreter_file_test(self): \"\"\"Test that None is returned for a file that", "self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_error.call_count)", "2.0 (the \"License\"); # you may not use this file", "mock_remove): # bypass pyfakefs's os.remove. 
os.remove = mock_remove mock_remove.side_effect =", "file_to_execute): \"\"\"Call get_execute_command on |file_to_execute| and assert result equal to", "def call_and_assert_helper(self, expected_command, file_to_execute): \"\"\"Call get_execute_command on |file_to_execute| and assert", "contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='')", "\"\"\"Tests for remove_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'os.chmod', 'os.mkdir', 'os.path.exists',", "[mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self):", "False, False] if raise_mkdir_error: self.mock.mkdir.side_effect = OSError() result = shell.remove_directory('dir',", "True) def test_remove_shutil_success(self): \"\"\"Test remove with shutil.\"\"\" self.mock.exists.side_effect = [True,", "[mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf", "command returned for a binary (executable) file.\"\"\" executable_name = 'executable'", "remove with os-specific command on non-windows.\"\"\" self._test_remove_os_specific('LINUX', True, False) self.mock.system.assert_has_calls(", "test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed", "class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for which (shutil.which).\"\"\" def setUp(self): # FIXME:", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", 
"test_remove(self): \"\"\"Test remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt',", "= 'javaclassfile.class' expected_command = 'java javaclassfile' self.call_and_assert_helper(expected_command, script_name) def test_binary(self):", "correct interpreters to execute a file are returned.\"\"\" def get_interpreted_file_test(self):", "contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for remove_directory.\"\"\" def setUp(self):", "Windows. if not environment.is_posix(): self.skipTest('Process tests are only applicable for", "test(self): \"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt',", "test_utils.set_up_pyfakefs(self) def test_remove(self): \"\"\"Test remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt',", "contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class", "class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_size.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self):", "GetExecuteCommand(unittest.TestCase): \"\"\"Test that the correct commands to run files are", "self.assertEqual(not raise_mkdir_error, result) else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([]) if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) 
else:", "but fails.\"\"\" self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed", "'script.py' expected_command = 'python %s' % script_name self.call_and_assert_helper(expected_command, script_name) def", "contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt')", "on |file_to_execute| and assert result equal to |expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute))", "use this file except in compliance with the License. #", "self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4)", "self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_size.\"\"\" def", "as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt',", "RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for remove_empty_files.\"\"\" def setUp(self): # FIXME: Add support", "self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase): \"\"\"Test that the correct commands", "executable_name) 
executable_name += '.exe' self.call_and_assert_helper(executable_name, executable_name) class GetInterpreter(object): \"\"\"Test that", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh'))", "self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'),", "License. # You may obtain a copy of the License", "onerror = self.mock.rmtree.call_args[1]['onerror'] fake_fn = mock.MagicMock() fake_fn.side_effect = OSError() onerror(fake_fn,", "test_remove_os_specific_windows(self): \"\"\"Test remove with os-specific command on windows.\"\"\" self._test_remove_os_specific('WINDOWS', True,", "/dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect = [True, True,", "self.mock.system.assert_has_calls([mock.call('rd /s /q \"dir\" > nul 2>&1')]) def test_remove_os_specific_non_windows(self): \"\"\"Test", "def test(self): \"\"\"Test clear_system_temp_directory works as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt',", "setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', 
contents='def')", "for which (shutil.which).\"\"\" def setUp(self): # FIXME: Add support for", "[mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_onerror(self):", "def test_remove_os_specific_non_windows(self): \"\"\"Test remove with os-specific command on non-windows.\"\"\" self._test_remove_os_specific('LINUX',", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "command on windows.\"\"\" self._test_remove_os_specific('WINDOWS', True, False) self.mock.system.assert_has_calls([mock.call('rd /s /q \"dir\"", "test_helpers.patch(self, [ 'os.chmod', 'os.mkdir', 'os.path.exists', 'os.system', 'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree',", "%s' % script_name self.call_and_assert_helper(expected_command, script_name) def test_java(self): \"\"\"Test correct launch", "\"\"\"Test correct launch command returned for a binary (executable) file.\"\"\"", "= shell.remove_directory('dir', recreate=recreate) if recreate: self.assertEqual(not raise_mkdir_error, result) else: self.assertTrue(result)", "self.skipTest('Process tests are only applicable for posix platforms.') test_utils.set_up_pyfakefs(self) def", "mkdir errors.\"\"\" self._test_remove_os_specific('LINUX', True, True) def test_remove_shutil_success(self): \"\"\"Test remove with", "posix platforms.') test_utils.set_up_pyfakefs(self) def test_remove(self): \"\"\"Test remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt',", "\"\"\"Test that None is returned for a file that doesn't", "def test_binary(self): \"\"\"Test correct launch command returned for a binary", "def test_remove_shutil_success(self): \"\"\"Test remove with shutil.\"\"\" self.mock.exists.side_effect = [True, True,", "import helpers as test_helpers from tests.test_libs import 
test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase):", "def test_remove_shutil_failure(self): \"\"\"Test remove with shutil but fails.\"\"\" self.mock.exists.side_effect =", "for clear_system_temp_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'tempfile.gettempdir', ]) self.mock.gettempdir.return_value =", "self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls(", "mock_remove mock_remove.side_effect = OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase):", "'os.path.exists', 'os.system', 'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree', ]) def _test_remove_os_specific(self, platform,", "shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt')", "if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self): \"\"\"Test remove with", "in compliance with the License. 
# You may obtain a", "[mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure(self):", "contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_size.\"\"\" def setUp(self):", "software # distributed under the License is distributed on an", "for remove_empty_files.\"\"\" def setUp(self): # FIXME: Add support for Windows.", "returned for a file that doesn't need one. We don't", "self.mock.gettempdir.return_value = '/tmp' test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test clear_system_temp_directory works as", "def test_remove(self): \"\"\"Test remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s')", "self.mock.exists.side_effect = [True, False, False] if raise_mkdir_error: self.mock.mkdir.side_effect = OSError()", "/q \"dir\" > nul 2>&1')]) def test_remove_os_specific_non_windows(self): \"\"\"Test remove with", "self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt'))", "to |expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def test_standard_script(self): \"\"\"Test correct command returned", "that doesn't need one. 
We don't want empty string since", "GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_file_count.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test", "launch command returned for a binary (executable) file.\"\"\" executable_name =", "= 'script.py' expected_command = 'python %s' % script_name self.call_and_assert_helper(expected_command, script_name)", "test is only supported on posix platforms.') def test(self): self.assertEqual('/bin/ls',", "/dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror = self.mock.rmtree.call_args[1]['onerror'] fake_fn = mock.MagicMock()", "a file that doesn't need one. We don't want empty", "self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir',", "True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_warn.call_count)", "test(self): \"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt',", "remove with shutil but fails.\"\"\" self.mock.exists.side_effect = [True, True, True]", "class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_file_count.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self):", "self.mock.mkdir.side_effect = OSError() result = shell.remove_directory('dir', recreate=recreate) if recreate: self.assertEqual(not", "self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', 
onerror=mock.ANY)])", "0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase): \"\"\"Test", "> /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror = self.mock.rmtree.call_args[1]['onerror'] fake_fn =", "Windows. if not environment.is_posix(): self.skipTest('Which test is only supported on", "# limitations under the License. \"\"\"shell tests.\"\"\" import mock import", "to clear directory dir.')]) self.assertEqual(0, self.mock.log_error.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\"", "self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase):", "\"dir\" > /dev/null 2>&1')]) def test_remove_without_recreate(self): \"\"\"Test remove without recreate.\"\"\"", "expected_command = 'java javaclassfile' self.call_and_assert_helper(expected_command, script_name) def test_binary(self): \"\"\"Test correct", "test_standard_script(self): \"\"\"Test correct command returned for python script.\"\"\" script_name =", "self.assertEqual('python', shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self): \"\"\"Test that None is returned for", "def _test_remove_os_specific(self, platform, recreate, raise_mkdir_error): \"\"\"Helper for testing removing dir", "\"\"\"Call get_execute_command on |file_to_execute| and assert result equal to |expected_command|.\"\"\"", "OF ANY KIND, either express or implied. 
# See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "True, True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0,", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "> /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_onerror(self): \"\"\"Test shutil invoking", "fake_fn.side_effect = OSError() onerror(fake_fn, 'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class", "to in writing, software # distributed under the License is", "-rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_onerror(self): \"\"\"Test", "def test_remove_without_recreate(self): \"\"\"Test remove without recreate.\"\"\" self._test_remove_os_specific('LINUX', False, True) def", "shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self): \"\"\"Test that None is returned for a", "# See the License for the specific language governing permissions", "files are returned.\"\"\" def call_and_assert_helper(self, expected_command, file_to_execute): \"\"\"Call get_execute_command on", "= [True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" >", "or agreed to in writing, software # distributed under the", "= platform self.mock.exists.side_effect = [True, False, False] if raise_mkdir_error: self.mock.mkdir.side_effect", "script_name = 'script.py' expected_command = 'python %s' % 
script_name self.call_and_assert_helper(expected_command,", "required by applicable law or agreed to in writing, software", "clear directory dir.')]) self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" >", "script.\"\"\" script_name = 'script.py' expected_command = 'python %s' % script_name", "unittest from pyfakefs import fake_filesystem_unittest from system import environment from", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "2>&1')]) def test_remove_without_recreate(self): \"\"\"Test remove without recreate.\"\"\" self._test_remove_os_specific('LINUX', False, True)", "a binary (executable) file.\"\"\" executable_name = 'executable' self.call_and_assert_helper(executable_name, executable_name) executable_name", "with the License. # You may obtain a copy of", "self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for remove_directory.\"\"\" def", "os-specific command.\"\"\" self.mock.platform.return_value = platform self.mock.exists.side_effect = [True, False, False]", "self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\"", "with os-specific command.\"\"\" self.mock.platform.return_value = platform self.mock.exists.side_effect = [True, False,", "remove without recreate.\"\"\" self._test_remove_os_specific('LINUX', False, True) def test_remove_with_mkdir_error(self): \"\"\"Test remove", "LLC # # Licensed under the Apache License, Version 2.0", "remove_empty_files.\"\"\" def setUp(self): # FIXME: Add support for Windows. 
if", "[True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null", "self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10)", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "\"\"\"Test remove with os-specific command on windows.\"\"\" self._test_remove_os_specific('WINDOWS', True, False)", "\"\"\"Test correct launch command returned for Java class.\"\"\" script_name =", "self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10) class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for which (shutil.which).\"\"\"", "result equal to |expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def test_standard_script(self): \"\"\"Test correct", "self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10) class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for", "distributed under the License is distributed on an \"AS IS\"", "-rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure(self): \"\"\"Test", "self._test_remove_os_specific('LINUX', True, False) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')])", "contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10) class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for which (shutil.which).\"\"\" def", "executable_name = 'executable' self.call_and_assert_helper(executable_name, 
executable_name) executable_name += '.exe' self.call_and_assert_helper(executable_name, executable_name)", "remove when mkdir errors.\"\"\" self._test_remove_os_specific('LINUX', True, True) def test_remove_shutil_success(self): \"\"\"Test", "def test_remove_shutil_onerror(self): \"\"\"Test shutil invoking onerror.\"\"\" self.mock.exists.side_effect = [True, True,", "express or implied. # See the License for the specific", "self.call_and_assert_helper(expected_command, script_name) def test_java(self): \"\"\"Test correct launch command returned for", "except in compliance with the License. # You may obtain", "def test(self): self.assertEqual('/bin/ls', shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for clear_system_temp_directory.\"\"\" def", "correct interpreter is returned for a file that needs one.\"\"\"", "'shutil.rmtree', ]) def _test_remove_os_specific(self, platform, recreate, raise_mkdir_error): \"\"\"Helper for testing", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "to execute a file are returned.\"\"\" def get_interpreted_file_test(self): \"\"\"Test correct", "not use this file except in compliance with the License.", "test_remove_os_specific_non_windows(self): \"\"\"Test remove with os-specific command on non-windows.\"\"\" self._test_remove_os_specific('LINUX', True,", "self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed to", "fake_filesystem_unittest from system import environment from system import shell from", "\"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t')", "writing, software # distributed under the License is distributed on", 
"expected_command, file_to_execute): \"\"\"Call get_execute_command on |file_to_execute| and assert result equal", "setUp(self): test_helpers.patch(self, [ 'os.chmod', 'os.mkdir', 'os.path.exists', 'os.system', 'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn',", "you may not use this file except in compliance with", "for get_directory_size.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt',", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "assert result equal to |expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def test_standard_script(self): \"\"\"Test", "class GetExecuteCommand(unittest.TestCase): \"\"\"Test that the correct commands to run files", "self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror = self.mock.rmtree.call_args[1]['onerror'] fake_fn = mock.MagicMock() fake_fn.side_effect =", "def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt',", "_test_remove_os_specific(self, platform, recreate, raise_mkdir_error): \"\"\"Helper for testing removing dir with", "self.call_and_assert_helper(expected_command, script_name) def test_binary(self): \"\"\"Test correct launch command returned for", "self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt'))", "returned for Java class.\"\"\" script_name = 'javaclassfile.class' expected_command = 'java", "self.assertTrue(result) 
self.mock.rmtree.assert_has_calls([]) if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self): \"\"\"Test", "@mock.patch('os.remove', autospec=True) def test_exception(self, mock_remove): # bypass pyfakefs's os.remove. os.remove", "get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'),", "/s /q \"dir\" > nul 2>&1')]) def test_remove_os_specific_non_windows(self): \"\"\"Test remove", "self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_onerror(self): \"\"\"Test shutil invoking onerror.\"\"\" self.mock.exists.side_effect =", "'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree', ]) def _test_remove_os_specific(self, platform, recreate, raise_mkdir_error): \"\"\"Helper", "dir with os-specific command.\"\"\" self.mock.platform.return_value = platform self.mock.exists.side_effect = [True,", "to clear directory dir.')]) self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\"", "-rf \"dir\" > /dev/null 2>&1')]) def test_remove_without_recreate(self): \"\"\"Test remove without", "test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test clear_system_temp_directory works as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc')", "self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_file_count.\"\"\" def setUp(self):", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "self.assertEqual('/bin/ls', shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for clear_system_temp_directory.\"\"\" def setUp(self): test_helpers.patch(self,", "self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt',", "recreate, raise_mkdir_error): \"\"\"Helper for testing removing dir with os-specific command.\"\"\"", "for python script.\"\"\" script_name = 'script.py' expected_command = 'python %s'", "shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for clear_system_temp_directory.\"\"\" def setUp(self): test_helpers.patch(self, [", "'python %s' % script_name self.call_and_assert_helper(expected_command, script_name) def test_java(self): \"\"\"Test correct", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "= [True, True, True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed to clear directory", "remove_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'os.chmod', 'os.mkdir', 'os.path.exists', 'os.system', 'system.environment.platform',", "directory dir.')]) self.assertEqual(0, self.mock.log_error.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null", "recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self): \"\"\"Test remove with os-specific", "invoking onerror.\"\"\" self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm", "\"\"\"Tests for which (shutil.which).\"\"\" def setUp(self): # FIXME: Add support", 
"without recreate.\"\"\" self._test_remove_os_specific('LINUX', False, True) def test_remove_with_mkdir_error(self): \"\"\"Test remove when", "get_interpreted_file_test(self): \"\"\"Test correct interpreter is returned for a file that", "We don't want empty string since this is easier to", "True, True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed to clear directory dir.')])", "self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase): \"\"\"Test that the correct commands to run", "class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for remove_empty_files.\"\"\" def setUp(self): # FIXME: Add", "from system import environment from system import shell from tests.test_libs", "else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([]) if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self):", "True, False) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) def", "test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt',", "executable_name += '.exe' self.call_and_assert_helper(executable_name, executable_name) class GetInterpreter(object): \"\"\"Test that the", "GetInterpreter(object): \"\"\"Test that the correct interpreters to execute a file", "/dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_onerror(self): \"\"\"Test shutil invoking onerror.\"\"\"", "self.call_and_assert_helper(executable_name, executable_name) executable_name += '.exe' self.call_and_assert_helper(executable_name, executable_name) class GetInterpreter(object): \"\"\"Test", "ImportError()) 
self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_file_count.\"\"\" def", "fake_fn = mock.MagicMock() fake_fn.side_effect = OSError() onerror(fake_fn, 'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child',", "2019 Google LLC # # Licensed under the Apache License,", "import mock import os import unittest from pyfakefs import fake_filesystem_unittest", "commands to run files are returned.\"\"\" def call_and_assert_helper(self, expected_command, file_to_execute):", "file.\"\"\" executable_name = 'executable' self.call_and_assert_helper(executable_name, executable_name) executable_name += '.exe' self.call_and_assert_helper(executable_name,", "on non-windows.\"\"\" self._test_remove_os_specific('LINUX', True, False) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" >", "True) def test_remove_with_mkdir_error(self): \"\"\"Test remove when mkdir errors.\"\"\" self._test_remove_os_specific('LINUX', True,", "dir.')]) self.assertEqual(0, self.mock.log_error.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')])", "\"\"\"Tests for remove_empty_files.\"\"\" def setUp(self): # FIXME: Add support for", "if not environment.is_posix(): self.skipTest('Which test is only supported on posix", "command on non-windows.\"\"\" self._test_remove_os_specific('LINUX', True, False) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\"", "\"\"\"Tests for get_directory_size.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_size.\"\"\"", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "the correct interpreters to execute a file are returned.\"\"\" def", "contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests", "non-windows.\"\"\" self._test_remove_os_specific('LINUX', True, False) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null", "is returned for a file that needs one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py'))", "empty string since this is easier to than None. \"\"\"", "the License is distributed on an \"AS IS\" BASIS, #", "test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True) def test_exception(self, mock_remove):", "fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_file_count.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def", "else: self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self): \"\"\"Test remove with os-specific command on", "self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure(self): \"\"\"Test remove with shutil but fails.\"\"\"", "> /dev/null 2>&1')]) def test_remove_without_recreate(self): \"\"\"Test remove without recreate.\"\"\" self._test_remove_os_specific('LINUX',", "2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect = [True, True, True]", "self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) 
self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class", "\"\"\"Test remove when mkdir errors.\"\"\" self._test_remove_os_specific('LINUX', True, True) def test_remove_shutil_success(self):", "self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt',", "'/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt'))", "that the correct interpreters to execute a file are returned.\"\"\"", "\"\"\"Test that the correct commands to run files are returned.\"\"\"", "self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) def test_remove_without_recreate(self): \"\"\"Test", "contents='') shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True) def test_exception(self, mock_remove): # bypass", "command.\"\"\" self.mock.platform.return_value = platform self.mock.exists.side_effect = [True, False, False] if", "self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_size.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self)", "[mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_error.call_count) 
self.mock.system.assert_has_calls( [mock.call('rm -rf", "self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)])", "'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree', ]) def _test_remove_os_specific(self, platform, recreate, raise_mkdir_error):", "GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_size.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test", "with shutil.\"\"\" self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm", "[True, False, False] if raise_mkdir_error: self.mock.mkdir.side_effect = OSError() result =", "that needs one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self): \"\"\"Test that None", "law or agreed to in writing, software # distributed under", "platform, recreate, raise_mkdir_error): \"\"\"Helper for testing removing dir with os-specific", "correct launch command returned for Java class.\"\"\" script_name = 'javaclassfile.class'", "windows.\"\"\" self._test_remove_os_specific('WINDOWS', True, False) self.mock.system.assert_has_calls([mock.call('rd /s /q \"dir\" > nul", "script_name) def test_java(self): \"\"\"Test correct launch command returned for Java", "# FIXME: Add support for Windows. 
if not environment.is_posix(): self.skipTest('Which", "works as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated')", "shutil but fails.\"\"\" self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls(", "ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for clear_system_temp_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'tempfile.gettempdir', ])", "test_helpers.patch(self, [ 'tempfile.gettempdir', ]) self.mock.gettempdir.return_value = '/tmp' test_utils.set_up_pyfakefs(self) def test(self):", "self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase): \"\"\"Test that the correct", "returned for a file that needs one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py')) def", "import unittest from pyfakefs import fake_filesystem_unittest from system import environment", "contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10) class", "test_remove_without_recreate(self): \"\"\"Test remove without recreate.\"\"\" self._test_remove_os_specific('LINUX', False, True) def test_remove_with_mkdir_error(self):", "4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_size.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def", "\"\"\"Test remove with os-specific command on non-windows.\"\"\" self._test_remove_os_specific('LINUX', True, False)", "\"\"\"Test 
remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='')", "are returned.\"\"\" def call_and_assert_helper(self, expected_command, file_to_execute): \"\"\"Call get_execute_command on |file_to_execute|", "tests.test_libs import test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for remove_empty_files.\"\"\" def setUp(self):", "tests are only applicable for posix platforms.') test_utils.set_up_pyfakefs(self) def test_remove(self):", "mock import os import unittest from pyfakefs import fake_filesystem_unittest from", "from tests.test_libs import helpers as test_helpers from tests.test_libs import test_utils", "may obtain a copy of the License at # #", "are only applicable for posix platforms.') test_utils.set_up_pyfakefs(self) def test_remove(self): \"\"\"Test", "class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for clear_system_temp_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'tempfile.gettempdir',", "os-specific command on non-windows.\"\"\" self._test_remove_os_specific('LINUX', True, False) self.mock.system.assert_has_calls( [mock.call('rm -rf", "+= '.exe' self.call_and_assert_helper(executable_name, executable_name) class GetInterpreter(object): \"\"\"Test that the correct", "os-specific command on windows.\"\"\" self._test_remove_os_specific('WINDOWS', True, False) self.mock.system.assert_has_calls([mock.call('rd /s /q", "don't want empty string since this is easier to than", "which (shutil.which).\"\"\" def setUp(self): # FIXME: Add support for Windows.", "environment.is_posix(): self.skipTest('Process tests are only applicable for posix platforms.') test_utils.set_up_pyfakefs(self)", "False] if raise_mkdir_error: self.mock.mkdir.side_effect = OSError() result = shell.remove_directory('dir', recreate=recreate)", 
"\"\"\"Test clear_system_temp_directory works as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg')", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "self._test_remove_os_specific('LINUX', True, True) def test_remove_shutil_success(self): \"\"\"Test remove with shutil.\"\"\" self.mock.exists.side_effect", "returned for a binary (executable) file.\"\"\" executable_name = 'executable' self.call_and_assert_helper(executable_name,", "\"\"\"Test remove without recreate.\"\"\" self._test_remove_os_specific('LINUX', False, True) def test_remove_with_mkdir_error(self): \"\"\"Test", "self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10) class WhichTest(fake_filesystem_unittest.TestCase):", "'os.mkdir', 'os.path.exists', 'os.system', 'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree', ]) def _test_remove_os_specific(self,", "may not use this file except in compliance with the", "system import environment from system import shell from tests.test_libs import", "for Windows. 
if not environment.is_posix(): self.skipTest('Which test is only supported", "recreate.\"\"\" self._test_remove_os_specific('LINUX', False, True) def test_remove_with_mkdir_error(self): \"\"\"Test remove when mkdir", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir',", "def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls(", "this file except in compliance with the License. # You", "Add support for Windows. if not environment.is_posix(): self.skipTest('Process tests are", "equal to |expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def test_standard_script(self): \"\"\"Test correct command", "os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt'))", "def setUp(self): test_helpers.patch(self, [ 'os.chmod', 'os.mkdir', 'os.path.exists', 'os.system', 'system.environment.platform', 'metrics.logs.log_error',", "supported on posix platforms.') def test(self): self.assertEqual('/bin/ls', shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase):", "not environment.is_posix(): self.skipTest('Process tests are only applicable for posix platforms.')", "nul 2>&1')]) def test_remove_os_specific_non_windows(self): \"\"\"Test remove with os-specific command on", "environment from system import shell from tests.test_libs import helpers as", "autospec=True) def 
test_exception(self, mock_remove): # bypass pyfakefs's os.remove. os.remove =", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "= [True, True, True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed to clear", "\"\"\"Tests for get_directory_file_count.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_file_count.\"\"\"", "helpers as test_helpers from tests.test_libs import test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests", "removing dir with os-specific command.\"\"\" self.mock.platform.return_value = platform self.mock.exists.side_effect =", "pyfakefs's os.remove. os.remove = mock_remove mock_remove.side_effect = OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='')", "clear_system_temp_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'tempfile.gettempdir', ]) self.mock.gettempdir.return_value = '/tmp'", "True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0,", "permissions and # limitations under the License. 
\"\"\"shell tests.\"\"\" import", "]) def _test_remove_os_specific(self, platform, recreate, raise_mkdir_error): \"\"\"Helper for testing removing", "\"\"\"Test shutil invoking onerror.\"\"\" self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir'))", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "\"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect =", "self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove',", "self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self): \"\"\"Test remove with os-specific command", "one. 
We don't want empty string since this is easier", "a file are returned.\"\"\" def get_interpreted_file_test(self): \"\"\"Test correct interpreter is", "test_binary(self): \"\"\"Test correct launch command returned for a binary (executable)", "self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror", "class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for remove_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'os.chmod',", "/dev/null 2>&1')]) def test_remove_without_recreate(self): \"\"\"Test remove without recreate.\"\"\" self._test_remove_os_specific('LINUX', False,", "|expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def test_standard_script(self): \"\"\"Test correct command returned for", "as test_helpers from tests.test_libs import test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for", "if recreate: self.assertEqual(not raise_mkdir_error, result) else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([]) if recreate:", "pyfakefs import fake_filesystem_unittest from system import environment from system import", "contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt'))", "contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_size.\"\"\"", "self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt') 
self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True) def test_exception(self, mock_remove): #", "= 'java javaclassfile' self.call_and_assert_helper(expected_command, script_name) def test_binary(self): \"\"\"Test correct launch", "FIXME: Add support for Windows. if not environment.is_posix(): self.skipTest('Process tests", "test_java(self): \"\"\"Test correct launch command returned for Java class.\"\"\" script_name", "recreate=recreate) if recreate: self.assertEqual(not raise_mkdir_error, result) else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([]) if", "for a binary (executable) file.\"\"\" executable_name = 'executable' self.call_and_assert_helper(executable_name, executable_name)", "def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True) def test_exception(self,", "launch command returned for Java class.\"\"\" script_name = 'javaclassfile.class' expected_command", "limitations under the License. \"\"\"shell tests.\"\"\" import mock import os", "License. 
\"\"\"shell tests.\"\"\" import mock import os import unittest from", "for Java class.\"\"\" script_name = 'javaclassfile.class' expected_command = 'java javaclassfile'", "[True, True, True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed to clear directory dir.')])", "contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10) class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for which", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt'))", "shell.remove_directory('dir', recreate=recreate) if recreate: self.assertEqual(not raise_mkdir_error, result) else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([])", "'metrics.logs.log_warn', 'shutil.rmtree', ]) def _test_remove_os_specific(self, platform, recreate, raise_mkdir_error): \"\"\"Helper for", "def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt',", "platform self.mock.exists.side_effect = [True, False, False] if raise_mkdir_error: self.mock.mkdir.side_effect =", "onerror(fake_fn, 'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests for", "get_directory_size.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', 
contents='abc')", "self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for remove_directory.\"\"\" def setUp(self): test_helpers.patch(self, [", "or implied. # See the License for the specific language", "False) self.mock.system.assert_has_calls([mock.call('rd /s /q \"dir\" > nul 2>&1')]) def test_remove_os_specific_non_windows(self):", "= '/tmp' test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test clear_system_temp_directory works as expected.\"\"\"", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "on posix platforms.') def test(self): self.assertEqual('/bin/ls', shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests", "'java javaclassfile' self.call_and_assert_helper(expected_command, script_name) def test_binary(self): \"\"\"Test correct launch command", "self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self): \"\"\"Test remove with os-specific command on windows.\"\"\"", "from tests.test_libs import test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for remove_empty_files.\"\"\" def", "mock_remove.side_effect = OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests", "fails.\"\"\" self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed to", "\"\"\"Test correct interpreter is returned for a file that needs", "None is returned for a file that doesn't need one.", "doesn't need one. 
We don't want empty string since this", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "result = shell.remove_directory('dir', recreate=recreate) if recreate: self.assertEqual(not raise_mkdir_error, result) else:", "ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_error.call_count) self.mock.system.assert_has_calls(", "test_remove_shutil_failure(self): \"\"\"Test remove with shutil but fails.\"\"\" self.mock.exists.side_effect = [True,", "self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase):", "shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True) def test_exception(self, mock_remove): # bypass pyfakefs's", "import os import unittest from pyfakefs import fake_filesystem_unittest from system", "def test(self): \"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi')", "with os-specific command on non-windows.\"\"\" self._test_remove_os_specific('LINUX', True, False) self.mock.system.assert_has_calls( [mock.call('rm", "self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt'))", "want empty string since this is easier to than None.", "os import unittest from pyfakefs import fake_filesystem_unittest from system import", "= 
self.mock.rmtree.call_args[1]['onerror'] fake_fn = mock.MagicMock() fake_fn.side_effect = OSError() onerror(fake_fn, 'dir/child',", "\"\"\"Test correct command returned for python script.\"\"\" script_name = 'script.py'", "(the \"License\"); # you may not use this file except", "> nul 2>&1')]) def test_remove_os_specific_non_windows(self): \"\"\"Test remove with os-specific command", "clear directory dir.')]) self.assertEqual(0, self.mock.log_error.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" >", "|file_to_execute| and assert result equal to |expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def", "self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'), 4) class GetDirectorySizeTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for", "# you may not use this file except in compliance", "Add support for Windows. 
if not environment.is_posix(): self.skipTest('Which test is", "\"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror = self.mock.rmtree.call_args[1]['onerror'] fake_fn", "contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_size('/test/aa'), 10) class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests", "javaclassfile' self.call_and_assert_helper(expected_command, script_name) def test_binary(self): \"\"\"Test correct launch command returned", "'javaclassfile.class' expected_command = 'java javaclassfile' self.call_and_assert_helper(expected_command, script_name) def test_binary(self): \"\"\"Test", "self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir')) self.mock.log_error.assert_has_calls( [mock.call('Failed to clear", "> /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect = [True,", "on windows.\"\"\" self._test_remove_os_specific('WINDOWS', True, False) self.mock.system.assert_has_calls([mock.call('rd /s /q \"dir\" >", "for testing removing dir with os-specific command.\"\"\" self.mock.platform.return_value = platform", "def test_java(self): \"\"\"Test correct launch command returned for Java class.\"\"\"", "-rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror = self.mock.rmtree.call_args[1]['onerror']", "WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for which (shutil.which).\"\"\" def setUp(self): # FIXME: Add", "not environment.is_posix(): self.skipTest('Which test is only supported on posix platforms.')", "'os.chmod', 'os.mkdir', 'os.path.exists', 'os.system', 'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree', 
]) def", "are returned.\"\"\" def get_interpreted_file_test(self): \"\"\"Test correct interpreter is returned for", "[mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) def test_remove_without_recreate(self): \"\"\"Test remove", "onerror=mock.ANY)]) def test_remove_shutil_failure(self): \"\"\"Test remove with shutil but fails.\"\"\" self.mock.exists.side_effect", "setUp(self): test_helpers.patch(self, [ 'tempfile.gettempdir', ]) self.mock.gettempdir.return_value = '/tmp' test_utils.set_up_pyfakefs(self) def", "10) class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for which (shutil.which).\"\"\" def setUp(self): #", "environment.is_posix(): self.skipTest('Which test is only supported on posix platforms.') def", "# # Unless required by applicable law or agreed to", "True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')])", "def setUp(self): # FIXME: Add support for Windows. if not", "= 'executable' self.call_and_assert_helper(executable_name, executable_name) executable_name += '.exe' self.call_and_assert_helper(executable_name, executable_name) class", "shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for remove_directory.\"\"\" def setUp(self): test_helpers.patch(self,", "correct launch command returned for a binary (executable) file.\"\"\" executable_name", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "returned for python script.\"\"\" script_name = 'script.py' expected_command = 'python", "Version 2.0 (the \"License\"); # you may not use this", "import fake_filesystem_unittest from system import environment from system import shell", "\"dir\" > nul 2>&1')]) def test_remove_os_specific_non_windows(self): \"\"\"Test remove with os-specific", "shutil invoking onerror.\"\"\" self.mock.exists.side_effect = [True, True, False] 
self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls(", "self.skipTest('Which test is only supported on posix platforms.') def test(self):", "remove with shutil.\"\"\" self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls(", "OSError() onerror(fake_fn, 'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests", "def get_interpreted_file_test(self): \"\"\"Test correct interpreter is returned for a file", "execute a file are returned.\"\"\" def get_interpreted_file_test(self): \"\"\"Test correct interpreter", "False, True) def test_remove_with_mkdir_error(self): \"\"\"Test remove when mkdir errors.\"\"\" self._test_remove_os_specific('LINUX',", "(shutil.which).\"\"\" def setUp(self): # FIXME: Add support for Windows. if", "implied. 
# See the License for the specific language governing", "self.mock.platform.return_value = platform self.mock.exists.side_effect = [True, False, False] if raise_mkdir_error:", "2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_onerror(self): \"\"\"Test shutil invoking onerror.\"\"\" self.mock.exists.side_effect", "under the Apache License, Version 2.0 (the \"License\"); # you", "self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated',", "2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror = self.mock.rmtree.call_args[1]['onerror'] fake_fn = mock.MagicMock() fake_fn.side_effect", "def test_remove_os_specific_windows(self): \"\"\"Test remove with os-specific command on windows.\"\"\" self._test_remove_os_specific('WINDOWS',", "bypass pyfakefs's os.remove. 
os.remove = mock_remove mock_remove.side_effect = OSError() self.fs.CreateFile('/test/aa/cc.txt',", "file are returned.\"\"\" def get_interpreted_file_test(self): \"\"\"Test correct interpreter is returned", "by applicable law or agreed to in writing, software #", "[True, True, True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True)) self.mock.log_warn.assert_has_calls( [mock.call('Failed to clear directory", "executable_name) class GetInterpreter(object): \"\"\"Test that the correct interpreters to execute", "'/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1)", "platforms.') def test(self): self.assertEqual('/bin/ls', shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for clear_system_temp_directory.\"\"\"", "contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self):", "from system import shell from tests.test_libs import helpers as test_helpers", "= OSError() onerror(fake_fn, 'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase):", "errors.\"\"\" self._test_remove_os_specific('LINUX', True, True) def test_remove_shutil_success(self): \"\"\"Test remove with shutil.\"\"\"", "support for Windows. 
if not environment.is_posix(): self.skipTest('Which test is only", "class GetInterpreter(object): \"\"\"Test that the correct interpreters to execute a", "command returned for python script.\"\"\" script_name = 'script.py' expected_command =", "= OSError() result = shell.remove_directory('dir', recreate=recreate) if recreate: self.assertEqual(not raise_mkdir_error,", "file that needs one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self): \"\"\"Test that", "self.mock.log_warn.assert_has_calls( [mock.call('Failed to clear directory dir.')]) self.assertEqual(0, self.mock.log_error.call_count) self.mock.system.assert_has_calls( [mock.call('rm", "interpreter is returned for a file that needs one.\"\"\" self.assertEqual('python',", "= mock_remove mock_remove.side_effect = OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class", "False) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) def test_remove_without_recreate(self):", "returned.\"\"\" def get_interpreted_file_test(self): \"\"\"Test correct interpreter is returned for a", "self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase): \"\"\"Test that the correct commands to", "2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure(self): \"\"\"Test remove with shutil but", "self.mock.log_error.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)])", "[ 'tempfile.gettempdir', ]) self.mock.gettempdir.return_value = '/tmp' test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test", "Copyright 2019 Google LLC # # Licensed under the Apache", "test(self): self.assertEqual('/bin/ls', 
shell.which('ls')) class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for clear_system_temp_directory.\"\"\" def setUp(self):", "is only supported on posix platforms.') def test(self): self.assertEqual('/bin/ls', shell.which('ls'))", "run files are returned.\"\"\" def call_and_assert_helper(self, expected_command, file_to_execute): \"\"\"Call get_execute_command", "recreate: self.assertEqual(not raise_mkdir_error, result) else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([]) if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')])", "testing removing dir with os-specific command.\"\"\" self.mock.platform.return_value = platform self.mock.exists.side_effect", "# FIXME: Add support for Windows. if not environment.is_posix(): self.skipTest('Process", "self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp'))", "interpreters to execute a file are returned.\"\"\" def get_interpreted_file_test(self): \"\"\"Test", "remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test')", "that None is returned for a file that doesn't need", "self.mock.rmtree.assert_has_calls([]) if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: self.mock.mkdir.assert_has_calls([]) def test_remove_os_specific_windows(self): \"\"\"Test remove", "a file that needs one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self): \"\"\"Test", "from pyfakefs import fake_filesystem_unittest from system import environment from system", "applicable for posix 
platforms.') test_utils.set_up_pyfakefs(self) def test_remove(self): \"\"\"Test remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt',", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "self.mock.rmtree.call_args[1]['onerror'] fake_fn = mock.MagicMock() fake_fn.side_effect = OSError() onerror(fake_fn, 'dir/child', ImportError())", "shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg'))", "the specific language governing permissions and # limitations under the", "def get_non_interpreter_file_test(self): \"\"\"Test that None is returned for a file", "system import shell from tests.test_libs import helpers as test_helpers from", "with shutil but fails.\"\"\" self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir'))", "Java class.\"\"\" script_name = 'javaclassfile.class' expected_command = 'java javaclassfile' self.call_and_assert_helper(expected_command,", "shutil.\"\"\" self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf", "applicable law or agreed to in writing, software # distributed", "expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz')", "'executable' self.call_and_assert_helper(executable_name, executable_name) executable_name += '.exe' 
self.call_and_assert_helper(executable_name, executable_name) class GetInterpreter(object):", "correct commands to run files are returned.\"\"\" def call_and_assert_helper(self, expected_command,", "for a file that doesn't need one. We don't want", "test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt',", "test(self): \"\"\"Test clear_system_temp_directory works as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def')", "contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii')", "raise_mkdir_error): \"\"\"Helper for testing removing dir with os-specific command.\"\"\" self.mock.platform.return_value", "if not environment.is_posix(): self.skipTest('Process tests are only applicable for posix", "]) self.mock.gettempdir.return_value = '/tmp' test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test clear_system_temp_directory works", "clear_system_temp_directory works as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc') self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def') self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh')", "\"\"\"Test that the correct interpreters to execute a file are", "Google LLC # # Licensed under the Apache License, Version", "tests.\"\"\" import mock import os import unittest from pyfakefs import", "in writing, software # distributed under the License is distributed", "raise_mkdir_error, result) else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([]) if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: 
self.mock.mkdir.assert_has_calls([])", "= [True, False, False] if raise_mkdir_error: self.mock.mkdir.side_effect = OSError() result", "/dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure(self): \"\"\"Test remove with shutil", "language governing permissions and # limitations under the License. \"\"\"shell", "= OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for", "mock.MagicMock() fake_fn.side_effect = OSError() onerror(fake_fn, 'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')])", "self.assertEqual(shell.get_directory_size('/test/aa'), 10) class WhichTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for which (shutil.which).\"\"\" def setUp(self):", "to run files are returned.\"\"\" def call_and_assert_helper(self, expected_command, file_to_execute): \"\"\"Call", "self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated'))", "self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/bb.txt')) self.assertTrue(os.path.exists('/test/aa/aa/dd.txt')) self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def", "returned.\"\"\" def call_and_assert_helper(self, expected_command, file_to_execute): \"\"\"Call get_execute_command on |file_to_execute| and", "self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) 
self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def", "def test_standard_script(self): \"\"\"Test correct command returned for python script.\"\"\" script_name", "[ 'os.chmod', 'os.mkdir', 'os.path.exists', 'os.system', 'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree', ])", "\"\"\"Test remove with shutil but fails.\"\"\" self.mock.exists.side_effect = [True, True,", "command returned for Java class.\"\"\" script_name = 'javaclassfile.class' expected_command =", "True, False) self.mock.system.assert_has_calls([mock.call('rd /s /q \"dir\" > nul 2>&1')]) def", "that the correct commands to run files are returned.\"\"\" def", "support for Windows. if not environment.is_posix(): self.skipTest('Process tests are only", "raise_mkdir_error: self.mock.mkdir.side_effect = OSError() result = shell.remove_directory('dir', recreate=recreate) if recreate:", "and assert result equal to |expected_command|.\"\"\" self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def test_standard_script(self):", "test_helpers from tests.test_libs import test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for remove_empty_files.\"\"\"", "is returned for a file that doesn't need one. 
We", "onerror=mock.ANY)]) def test_remove_shutil_onerror(self): \"\"\"Test shutil invoking onerror.\"\"\" self.mock.exists.side_effect = [True,", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True) def test_exception(self, mock_remove): # bypass pyfakefs's os.remove.", "'tempfile.gettempdir', ]) self.mock.gettempdir.return_value = '/tmp' test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test clear_system_temp_directory", "self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir',", "get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t') self.assertEqual(shell.get_directory_file_count('/test/aa'),", "License, Version 2.0 (the \"License\"); # you may not use", "# bypass pyfakefs's os.remove. os.remove = mock_remove mock_remove.side_effect = OSError()", "onerror=mock.ANY)]) onerror = self.mock.rmtree.call_args[1]['onerror'] fake_fn = mock.MagicMock() fake_fn.side_effect = OSError()", "# You may obtain a copy of the License at", "FIXME: Add support for Windows. 
if not environment.is_posix(): self.skipTest('Which test", "\"\"\"shell tests.\"\"\" import mock import os import unittest from pyfakefs", "\"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_onerror(self): \"\"\"Test shutil", "one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self): \"\"\"Test that None is returned", "shell.get_execute_command(file_to_execute)) def test_standard_script(self): \"\"\"Test correct command returned for python script.\"\"\"", "class.\"\"\" script_name = 'javaclassfile.class' expected_command = 'java javaclassfile' self.call_and_assert_helper(expected_command, script_name)", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt')) class RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for remove_directory.\"\"\"", "\"\"\"Tests for clear_system_temp_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'tempfile.gettempdir', ]) self.mock.gettempdir.return_value", "self.fs.CreateDirectory('/tmp/ff/gg') self.fs.CreateDirectory('/tmp/hh') self.fs.CreateDirectory('/unrelated') self.fs.CreateFile('/unrelated/zz.txt', contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory()", "(executable) file.\"\"\" executable_name = 'executable' self.call_and_assert_helper(executable_name, executable_name) executable_name += '.exe'", "test_exception(self, mock_remove): # bypass pyfakefs's os.remove. os.remove = mock_remove mock_remove.side_effect", "file that doesn't need one. 
We don't want empty string", "'/tmp' test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test clear_system_temp_directory works as expected.\"\"\" self.fs.CreateFile('/tmp/aa/bb.txt',", "string since this is easier to than None. \"\"\" self.assertIsNone(shell.get_interpreter('executable'))", "dir.')]) self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')])", "setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def')", "tests.test_libs import helpers as test_helpers from tests.test_libs import test_utils class", "self.assertEqual(shell.get_directory_file_count('/unrelated'), 1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase): \"\"\"Test that", "the License for the specific language governing permissions and #", "the License. \"\"\"shell tests.\"\"\" import mock import os import unittest", "% script_name self.call_and_assert_helper(expected_command, script_name) def test_java(self): \"\"\"Test correct launch command", "Apache License, Version 2.0 (the \"License\"); # you may not", "'.exe' self.call_and_assert_helper(executable_name, executable_name) class GetInterpreter(object): \"\"\"Test that the correct interpreters", "under the License. \"\"\"shell tests.\"\"\" import mock import os import", "either express or implied. 
# See the License for the", "> /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure(self): \"\"\"Test remove with", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "result) else: self.assertTrue(result) self.mock.rmtree.assert_has_calls([]) if recreate: self.mock.mkdir.assert_has_calls([mock.call('dir')]) else: self.mock.mkdir.assert_has_calls([]) def", "self._test_remove_os_specific('WINDOWS', True, False) self.mock.system.assert_has_calls([mock.call('rd /s /q \"dir\" > nul 2>&1')])", "onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect = [True, True, True] self.assertFalse(shell.remove_directory('dir', ignore_errors=True))", "python script.\"\"\" script_name = 'script.py' expected_command = 'python %s' %", "2>&1')]) def test_remove_os_specific_non_windows(self): \"\"\"Test remove with os-specific command on non-windows.\"\"\"", "test_remove_shutil_onerror(self): \"\"\"Test shutil invoking onerror.\"\"\" self.mock.exists.side_effect = [True, True, False]", "def test_exception(self, mock_remove): # bypass pyfakefs's os.remove. os.remove = mock_remove", "setUp(self): # FIXME: Add support for Windows. 
if not environment.is_posix():", "test_remove_with_mkdir_error(self): \"\"\"Test remove when mkdir errors.\"\"\" self._test_remove_os_specific('LINUX', True, True) def", "only supported on posix platforms.') def test(self): self.assertEqual('/bin/ls', shell.which('ls')) class", "RemoveDirectoryTest(unittest.TestCase): \"\"\"Tests for remove_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'os.chmod', 'os.mkdir',", "the correct commands to run files are returned.\"\"\" def call_and_assert_helper(self,", "correct command returned for python script.\"\"\" script_name = 'script.py' expected_command", "test_remove_shutil_success(self): \"\"\"Test remove with shutil.\"\"\" self.mock.exists.side_effect = [True, True, False]", "import shell from tests.test_libs import helpers as test_helpers from tests.test_libs", "binary (executable) file.\"\"\" executable_name = 'executable' self.call_and_assert_helper(executable_name, executable_name) executable_name +=", "self.assertEqual(expected_command, shell.get_execute_command(file_to_execute)) def test_standard_script(self): \"\"\"Test correct command returned for python", "script_name self.call_and_assert_helper(expected_command, script_name) def test_java(self): \"\"\"Test correct launch command returned", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "needs one.\"\"\" self.assertEqual('python', shell.get_interpreter('run.py')) def get_non_interpreter_file_test(self): \"\"\"Test that None is", "need one. 
We don't want empty string since this is", "[mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) onerror =", "directory dir.')]) self.assertEqual(0, self.mock.log_warn.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null", "self.assertEqual(0, self.mock.log_error.call_count) self.mock.system.assert_has_calls( [mock.call('rm -rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir',", "1) self.assertFalse(os.path.exists('/tmp/aa/bb.txt')) self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt')) self.assertFalse(os.path.exists('/tmp/ff/gg')) self.assertFalse(os.path.exists('/tmp/hh')) class GetExecuteCommand(unittest.TestCase): \"\"\"Test that the", "with os-specific command on windows.\"\"\" self._test_remove_os_specific('WINDOWS', True, False) self.mock.system.assert_has_calls([mock.call('rd /s", "script_name) def test_binary(self): \"\"\"Test correct launch command returned for a", "def setUp(self): test_helpers.patch(self, [ 'tempfile.gettempdir', ]) self.mock.gettempdir.return_value = '/tmp' test_utils.set_up_pyfakefs(self)", "-rf \"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure_ignore_errors(self): self.mock.exists.side_effect", "\"\"\"Helper for testing removing dir with os-specific command.\"\"\" self.mock.platform.return_value =", "get_execute_command on |file_to_execute| and assert result equal to |expected_command|.\"\"\" self.assertEqual(expected_command,", "def test_remove_with_mkdir_error(self): \"\"\"Test remove when mkdir errors.\"\"\" self._test_remove_os_specific('LINUX', True, True)", "when mkdir errors.\"\"\" self._test_remove_os_specific('LINUX', True, True) def test_remove_shutil_success(self): \"\"\"Test remove", "os.remove. 
os.remove = mock_remove mock_remove.side_effect = OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test')", "for posix platforms.') test_utils.set_up_pyfakefs(self) def test_remove(self): \"\"\"Test remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s')", "self.call_and_assert_helper(executable_name, executable_name) class GetInterpreter(object): \"\"\"Test that the correct interpreters to", "expected_command = 'python %s' % script_name self.call_and_assert_helper(expected_command, script_name) def test_java(self):", "\"License\"); # you may not use this file except in", "governing permissions and # limitations under the License. \"\"\"shell tests.\"\"\"", "self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True) def", "call_and_assert_helper(self, expected_command, file_to_execute): \"\"\"Call get_execute_command on |file_to_execute| and assert result", "and # limitations under the License. 
\"\"\"shell tests.\"\"\" import mock", "self.assertFalse(os.path.exists('/test/aa/cc.txt')) self.assertFalse(os.path.exists('/test/aa/aa/aa.txt')) def test_ignore_file(self): self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test/aa/cc.txt') self.assertTrue(os.path.exists('/test/aa/cc.txt')) @mock.patch('os.remove', autospec=True)", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "contents='zzz') os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0)", "import test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for remove_empty_files.\"\"\" def setUp(self): #", "# Copyright 2019 Google LLC # # Licensed under the", "for get_directory_file_count.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt',", "\"\"\"Test remove with shutil.\"\"\" self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir'))", "0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): \"\"\"Tests for get_directory_file_count.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self)", "# distributed under the License is distributed on an \"AS", "only applicable for posix platforms.') test_utils.set_up_pyfakefs(self) def test_remove(self): \"\"\"Test remove.\"\"\"", "# Unless required by applicable law or agreed to in", "shell from tests.test_libs import helpers as test_helpers from tests.test_libs import", "'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)]) fake_fn.assert_has_calls([mock.call('dir/child')]) class GetDirectoryFileCount(fake_filesystem_unittest.TestCase): 
\"\"\"Tests for get_directory_file_count.\"\"\"", "= 'python %s' % script_name self.call_and_assert_helper(expected_command, script_name) def test_java(self): \"\"\"Test", "os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt') os.symlink('/unrelated', '/tmp/ii') shell.clear_system_temp_directory() self.assertTrue(os.path.exists('/tmp')) self.assertTrue(os.path.exists('/unrelated')) self.assertEqual(shell.get_directory_file_count('/tmp'), 0) self.assertEqual(shell.get_directory_file_count('/unrelated'),", "test_utils class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase): \"\"\"Tests for remove_empty_files.\"\"\" def setUp(self): # FIXME:", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "for Windows. if not environment.is_posix(): self.skipTest('Process tests are only applicable", "platforms.') test_utils.set_up_pyfakefs(self) def test_remove(self): \"\"\"Test remove.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='s') self.fs.CreateFile('/test/aa/cc.txt', contents='')", "self._test_remove_os_specific('LINUX', False, True) def test_remove_with_mkdir_error(self): \"\"\"Test remove when mkdir errors.\"\"\"", "\"dir\" > /dev/null 2>&1')]) self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)]) def test_remove_shutil_failure(self): \"\"\"Test remove", "onerror.\"\"\" self.mock.exists.side_effect = [True, True, False] self.assertTrue(shell.remove_directory('dir')) self.mock.system.assert_has_calls( [mock.call('rm -rf", "for remove_directory.\"\"\" def setUp(self): test_helpers.patch(self, [ 'os.chmod', 'os.mkdir', 'os.path.exists', 'os.system',", "You may obtain a copy of the License at #", "os.remove = mock_remove mock_remove.side_effect = OSError() self.fs.CreateFile('/test/aa/cc.txt', contents='') shell.remove_empty_files('/test') self.assertTrue(os.path.exists('/test/aa/cc.txt'))", "def test(self): \"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', 
contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi')", "\"\"\"Test get_directory_size.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc') self.fs.CreateFile('/test/aa/cc.txt', contents='def') self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi') self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t')", "True, True) def test_remove_shutil_success(self): \"\"\"Test remove with shutil.\"\"\" self.mock.exists.side_effect =", "'os.system', 'system.environment.platform', 'metrics.logs.log_error', 'metrics.logs.log_warn', 'shutil.rmtree', ]) def _test_remove_os_specific(self, platform, recreate,", "= mock.MagicMock() fake_fn.side_effect = OSError() onerror(fake_fn, 'dir/child', ImportError()) self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)])", "the Apache License, Version 2.0 (the \"License\"); # you may", "import environment from system import shell from tests.test_libs import helpers", "OSError() result = shell.remove_directory('dir', recreate=recreate) if recreate: self.assertEqual(not raise_mkdir_error, result)", "if raise_mkdir_error: self.mock.mkdir.side_effect = OSError() result = shell.remove_directory('dir', recreate=recreate) if", "get_directory_file_count.\"\"\" def setUp(self): test_utils.set_up_pyfakefs(self) def test(self): \"\"\"Test get_directory_file_count.\"\"\" self.fs.CreateFile('/test/aa/bb.txt', contents='abc')", "remove with os-specific command on windows.\"\"\" self._test_remove_os_specific('WINDOWS', True, False) self.mock.system.assert_has_calls([mock.call('rd" ]
[ "False: return out3 else: out4 = self.Layers[3](out3) return out4 def", "embedding_dim self.embeddings = tf.keras.layers.Embedding(self.vocab_size, self.embedding_dim, weights = [embedding_matrix], trainable=False, name='embeddings')", "out1 = self.Layers[0](inp) out2 = self.Layers[1]([inp, out1]) out3 = self.Layers[2](out2)", "self.Layers[2](out2) if predict is False: return out3 else: out4 =", "[tf.keras.layers.Bidirectional(tf.keras.layers.GRU(self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform', name='birnn')), tf.keras.layers.Dense(self.projection_units, name='projection')] def call(self, inp):", "_ = self.Layers[0](inp) out = self.Layers[1](out) return out class BiRNN(tf.keras.Model):", "__init__(self, units: int,projection_units: int,max_seq_length: int, vocab_size: int,embedding_dim: int,embedding_matrix = None):", "= units self.projection_units = projection_units self.Layers = [tf.keras.layers.Bidirectional(tf.keras.layers.GRU(self.units, return_sequences=True, return_state=True,", "self.projection_units = projection_units self.Layers = [tf.keras.layers.Bidirectional(tf.keras.layers.GRU(self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform', name='birnn')),", "= np.zeros(seq.shape) mask[:,:t] = 1 inp = tf.math.multiply(seq, mask) mask[:,:t+1]", "class BiRNN(tf.keras.Model): def __init__(self, units: int,projection_units: int,max_seq_length: int, vocab_size: int,embedding_dim:", "self.max_seq_length = max_seq_length self.vocab_size = vocab_size self.embedding_dim = embedding_dim self.embeddings", "self).__init__() self.units = units self.projection_units = projection_units self.Layers = [tf.keras.layers.Bidirectional(tf.keras.layers.GRU(self.units,", "recurrent_initializer='glorot_uniform', name='birnn')), tf.keras.layers.Dense(self.projection_units, name='projection')] def call(self, inp): out, _, _", "tf.keras.layers.Dense(self.projection_units, name='projection')] def 
call(self, inp): out, _, _ = self.Layers[0](inp)", "= self.Layers[1]([inp, out1]) out3 = self.Layers[2](out2) if predict is False:", "self.units = units self.projection_units = projection_units self.Layers = [tf.keras.layers.Bidirectional(tf.keras.layers.GRU(self.units, return_sequences=True,", "inp): out, _, _ = self.Layers[0](inp) out = self.Layers[1](out) return", "call(self, inp, predict=False): inp = self.embeddings(inp) out1 = self.Layers[0](inp) out2", "BiRNN(tf.keras.Model): def __init__(self, units: int,projection_units: int,max_seq_length: int, vocab_size: int,embedding_dim: int,embedding_matrix", "int,embedding_dim: int,embedding_matrix = None): super(BiRNN, self).__init__() self.units = units self.projection_units=projection_units", "= [tf.keras.layers.Bidirectional(tf.keras.layers.GRU(self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform', name='birnn')), tf.keras.layers.Dense(self.projection_units, name='projection')] def call(self,", "inp = tf.math.multiply(seq, mask) mask[:,:t+1] = 1 tar = tf.math.multiply(seq,", "return out4 def loss_function(real, pred, loss_object): mask = tf.math.logical_not(tf.math.equal(real, 0))", "mask = tf.cast(mask, dtype=loss_.dtype) loss_ *= mask return tf.reduce_mean(loss_) def", "= tf.math.multiply(seq, mask) mask[:,:t+1] = 1 tar = tf.math.multiply(seq, mask)", "np import tensorflow as tf class Bidirectional(tf.keras.Model): def __init__(self, units:", "out4 = self.Layers[3](out3) return out4 def loss_function(real, pred, loss_object): mask", "= tf.cast(mask, dtype=loss_.dtype) loss_ *= mask return tf.reduce_mean(loss_) def mask_sequences(seq,", "out2 = self.Layers[1]([inp, out1]) out3 = self.Layers[2](out2) if predict is", "def mask_sequences(seq, t): mask = np.zeros(seq.shape) mask[:,:t] = 1 inp", "self.embedding_dim, weights = [embedding_matrix], trainable=False, name='embeddings') self.Layers = [Bidirectional(units=self.units, projection_units=self.projection_units),", "inp, 
# Language Model/birnn/model.py
# (dataset residue "<filename>...<gh_stars>1-10" removed: it is not valid Python)
import numpy as np
import tensorflow as tf


class Bidirectional(tf.keras.Model):
    """A bidirectional GRU followed by a linear projection.

    The wrapped GRU is built with ``return_sequences=True`` and
    ``return_state=True``, so the bidirectional layer yields
    ``(sequence_output, forward_state, backward_state)``; only the
    per-timestep sequence output is projected and returned.
    """

    def __init__(self, units: int, projection_units: int):
        super().__init__()
        self.units = units
        self.projection_units = projection_units
        self.Layers = [
            tf.keras.layers.Bidirectional(
                tf.keras.layers.GRU(
                    self.units,
                    return_sequences=True,
                    return_state=True,
                    recurrent_initializer='glorot_uniform',
                    name='birnn')),
            tf.keras.layers.Dense(self.projection_units, name='projection'),
        ]

    def call(self, inp):
        """Run the BiGRU and project its sequence output.

        Args:
            inp: float tensor of shape (batch, time, features).

        Returns:
            Tensor of shape (batch, time, projection_units).
        """
        # Discard the forward/backward final states; keep per-step output.
        out, _, _ = self.Layers[0](inp)
        return self.Layers[1](out)


class BiRNN(tf.keras.Model):
    """Two stacked Bidirectional blocks with a residual Add connection.

    Pipeline: embeddings -> BiRNN block -> Add(embeddings, block output)
    -> BiRNN block -> (optionally) softmax over the vocabulary.

    NOTE(review): the residual Add requires ``embedding_dim`` to equal
    ``projection_units`` — confirm at the call site.
    """

    def __init__(self, units: int, projection_units: int, max_seq_length: int,
                 vocab_size: int, embedding_dim: int, embedding_matrix=None):
        super().__init__()
        self.units = units
        self.projection_units = projection_units
        self.max_seq_length = max_seq_length
        self.vocab_size = vocab_size
        self.embedding_dim = embedding_dim
        # Bug fix: the original passed weights=[embedding_matrix]
        # unconditionally, which crashes when embedding_matrix is None
        # (the documented default). With no pretrained matrix the
        # embedding is left trainable instead.
        if embedding_matrix is not None:
            self.embeddings = tf.keras.layers.Embedding(
                self.vocab_size, self.embedding_dim,
                weights=[embedding_matrix], trainable=False,
                name='embeddings')
        else:
            self.embeddings = tf.keras.layers.Embedding(
                self.vocab_size, self.embedding_dim, name='embeddings')
        self.Layers = [
            Bidirectional(units=self.units,
                          projection_units=self.projection_units),
            tf.keras.layers.Add(),
            Bidirectional(units=self.units,
                          projection_units=self.projection_units),
            tf.keras.layers.Dense(self.vocab_size, activation='softmax',
                                  name='softmax'),
        ]

    def call(self, inp, predict=False):
        """Forward pass.

        Args:
            inp: int tensor of token ids, shape (batch, time).
            predict: when True, also apply the softmax head and return
                per-token vocabulary probabilities; otherwise return the
                hidden representation of the second block.

        Returns:
            (batch, time, projection_units) when ``predict`` is False,
            (batch, time, vocab_size) otherwise.
        """
        inp = self.embeddings(inp)
        out1 = self.Layers[0](inp)
        out2 = self.Layers[1]([inp, out1])   # residual connection
        out3 = self.Layers[2](out2)
        if predict is False:                  # original identity check kept
            return out3
        return self.Layers[3](out3)


def loss_function(real, pred, loss_object):
    """Padding-masked loss: positions where ``real == 0`` contribute 0.

    Args:
        real: int tensor of target token ids; 0 is the padding id.
        pred: model predictions accepted by ``loss_object``.
        loss_object: per-element Keras loss (e.g. SparseCategoricalCrossentropy
            with reduction='none').

    Returns:
        Scalar mean loss.

    NOTE(review): ``reduce_mean`` averages over *all* positions, padded
    ones included, so heavily padded batches dilute the loss. Kept as-is
    to preserve training behavior; a per-token mean would be
    ``reduce_sum(loss_) / reduce_sum(mask)``.
    """
    mask = tf.math.logical_not(tf.math.equal(real, 0))
    loss_ = loss_object(real, pred)
    mask = tf.cast(mask, dtype=loss_.dtype)
    loss_ *= mask
    return tf.reduce_mean(loss_)


def mask_sequences(seq, t):
    """Split ``seq`` into a length-t input prefix and a length-(t+1) target.

    Args:
        seq: (batch, time) tensor/array of token ids.
        t: number of visible timesteps for the input.

    Returns:
        (inp, tar): ``seq`` with everything from step t (resp. t+1)
        onward zeroed out.
    """
    # Bug fix: np.zeros defaults to float64, and tf.math.multiply
    # requires both operands to share a dtype, so the original raised
    # for the usual integer token sequences. Build the mask with the
    # sequence's own dtype.
    dtype = seq.dtype.as_numpy_dtype if hasattr(seq.dtype, 'as_numpy_dtype') else seq.dtype
    mask = np.zeros(seq.shape, dtype=dtype)
    mask[:, :t] = 1
    inp = tf.math.multiply(seq, mask)
    mask[:, :t + 1] = 1
    tar = tf.math.multiply(seq, mask)
    return inp, tar