index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
38,368
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/run_compositionality_exp.py
|
import os
import traceback
import numpy as np
import cv2
import sklearn
import umap
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn import linear_model
from sklearn.neural_network import MLPRegressor
from PIL import Image
from absl import app, flags, logging
from matplotlib.offsetbox import OffsetImage, AnnotationBbox
import models
import configs
import datasets
from models import ClassifierModel, DrawerModel, VAE
from util import HParams, scale_and_rasterize, stroke_three_format, scale_and_center_stroke_three, rasterize
from util import log_flags, log_hparams
# Silence TensorFlow's C++ logging before any TF-backed work runs.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

FLAGS = flags.FLAGS

# Paths and run identification.
flags.DEFINE_string("dir", "/h/wangale/project/few-shot-sketch", "Project directory")
flags.DEFINE_string("data_dir", "/h/wangale/data", "Data directory")
flags.DEFINE_string("id", "comptest", "training_id")
flags.DEFINE_string("logfile", "comptest", "Logfile name")

# Drawer (sketch-decoder) model selection.
flags.DEFINE_string("drawer_id", None, "Drawing model id")
flags.DEFINE_string("drawer_model", None, "Drawer model")
flags.DEFINE_string("drawer_cfgset", None, "Configuration set for the drawer model")
flags.DEFINE_string("drawer_cfgs", "", "Custom configuration for the drawer model configs")

# VAE baseline model selection.
flags.DEFINE_string("vae_id", None, "VAE model ID")
flags.DEFINE_string("vae_model", None, "VAE model")
flags.DEFINE_string("vae_cfgset", None, "Configuration set for the vae model")
flags.DEFINE_string("vae_cfgs", "", "Custom configuration for the vae model configs")

flags.DEFINE_integer("random_seed", 1, "Random seed")

# Experiment toggles: each flag enables one compositionality experiment.
flags.DEFINE_bool("conceptual_composition", True, "Add/sub embeddings.")
flags.DEFINE_bool("relation_count", True, "Count test")
flags.DEFINE_bool("relation_count_toy", True, "Count with toy examples")
flags.DEFINE_bool("relation_orient", True, "Orientation test")
flags.DEFINE_bool("relation_inout", True, "In Out test")
flags.DEFINE_bool("relation_four", True, "Four compose test")
flags.DEFINE_bool("relation_count_readout", True, "Count readout")
flags.DEFINE_bool("relation_four_readout", True, "Four discrete readout test")
# Help text fixed: was a copy-paste duplicate of the relation_four_readout line.
flags.DEFINE_bool("relation_nest_readout", True, "Nesting (in/out) discrete readout test")
flags.DEFINE_bool("latent_distance", True, "Distance test")
flags.DEFINE_bool("latent_angle", True, "Angle test")
flags.DEFINE_bool("latent_size", True, "Size test")
flags.DEFINE_bool("latent_angle_readout", True, "Angle linear readout test")
flags.DEFINE_bool("latent_distance_readout", True, "Distance linear readout test")
flags.DEFINE_bool("latent_size_readout", True, "Size linear readout test")
flags.DEFINE_bool("n_interpolate", False, "Interpolate between 2,3,4 objects per image")
flags.DEFINE_bool("four_rotate", False, "rotate four placed values")
def conceptual_composition(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Embedding-arithmetic test: decode ``a - b + c`` compositions.

    For each embedding model, computes e.g. snowman - circle + square in
    embedding space, decodes the result back to a 28x28 raster, and saves
    the operand images beside the decoded result under
    ``<base_dir>/addembed/<case>/.../<model_class>/``.

    Args:
        embedding_models: models exposing ``embed(...)`` and ``decode(...)``.
        clustering_methods: unused here; kept for a uniform experiment API.
        base_dir: output root directory.
        title: unused here; kept for a uniform experiment API.
    """
    folder = os.path.join(base_dir, "addembed")
    ex_per_class = 50
    os.makedirs(folder, exist_ok=True)
    snowman_config: HParams = configs.get_config("quickdraw")().parse("split=snowman,shuffle=False,batch_size={}".format(ex_per_class))
    snowman_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, snowman_config)
    snowman_dataset = snowman_dataset_proto.load(repeat=False)[0]
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(ex_per_class))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(ex_per_class))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    television_config = configs.get_config("quickdraw")().parse("split=television,shuffle=False,batch_size={}".format(ex_per_class))
    television_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, television_config)
    television_dataset = television_dataset_proto.load(repeat=False)[0]
    # Dataset elements are tuples; slots 2:4 hold (raster batch, class names).
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    snowman_batch, snowman_names = next(snowman_dataset.__iter__())[2:4]
    square_batch, square_names = next(square_dataset.__iter__())[2:4]
    television_batch, television_names = next(television_dataset.__iter__())[2:4]
    for model in embedding_models:
        circle_embed = model.embed(circle_batch, training=False)[0]
        snowman_embed = model.embed(snowman_batch, training=False)[0]
        square_embed = model.embed(square_batch, training=False)[0]
        television_embed = model.embed(television_batch, training=False)[0]

        def _decode_to_images(embed):
            # Decode a batch of embeddings to 28x28 rasters in [0, 255],
            # dispatching on the model family by class name.
            if "Drawer" in model.__class__.__name__:
                decodes = model.decode(embed, training=False, generation_length=64)[1]
                lst = []
                for decode in decodes:
                    lst.append(scale_and_rasterize(stroke_three_format(decode), png_dimensions=(28, 28), stroke_width=1))
                return np.array(lst).astype(np.float32)
            elif "VAE" in model.__class__.__name__:
                return tf.image.resize(model.decode(embed, training=False), (28, 28)) * 255.0
            # Previously this only logged and then hit a NameError downstream;
            # fail loudly instead.
            raise ValueError("Error, wrong embedding model")

        # Two arithmetic cases: snowman - circle + square, and
        # television - square + circle.
        for i, embeds in enumerate([(snowman_embed, circle_embed, square_embed,
                                     snowman_batch, circle_batch, square_batch),
                                    (television_embed, square_embed, circle_embed,
                                     television_batch, square_batch, circle_batch)]):
            # exist_ok=True so re-running the experiment does not crash
            # (was missing here while every other makedirs in this file has it).
            os.makedirs(os.path.join(folder, str(i), model.__class__.__name__), exist_ok=True)
            a, b, c, a_im, b_im, c_im = embeds
            new_embed = a - b + c
            gen_image = _decode_to_images(new_embed)
            # Intermediate (a - b) decode; currently computed but not saved.
            inter_embed = a - b
            gen_inter_image = _decode_to_images(inter_embed)
            for k in range(ex_per_class):
                # Side-by-side strip: a | b | c | decoded(a - b + c).
                np_img = np.concatenate((a_im[k], b_im[k], c_im[k], gen_image[k]), axis=1).astype(np.uint8)
                Image.fromarray(np_img).save(os.path.join(folder, str(i), model.__class__.__name__, '{}.png'.format(k)))
                # Also save each panel individually.
                for j, np_arr in enumerate((a_im[k], b_im[k], c_im[k], gen_image[k])):
                    os.makedirs(os.path.join(folder, str(i), str(j), model.__class__.__name__), exist_ok=True)
                    Image.fromarray(np.array(np_arr).astype(np.uint8)).save(os.path.join(folder, str(i), str(j), model.__class__.__name__, '{}.png'.format(k)))
def relation_count(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Object-count test on real QuickDraw classes.

    Embeds circle/snowman/square/television batches, recolors each class's
    thumbnails with a distinct RGB color, and plots 2-D projections of the
    embeddings (via ``project_plot``) to see whether the latent space
    organizes by component count.
    """
    folder = os.path.join(base_dir, "count")
    ex_per_class = 20
    os.makedirs(folder, exist_ok=True)
    snowman_config: HParams = configs.get_config("quickdraw")().parse("split=snowman,shuffle=False,batch_size={}".format(ex_per_class))
    snowman_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, snowman_config)
    snowman_dataset = snowman_dataset_proto.load(repeat=False)[0]
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(ex_per_class))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(ex_per_class))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    television_config = configs.get_config("quickdraw")().parse("split=television,shuffle=False,batch_size={}".format(ex_per_class))
    television_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, television_config)
    television_dataset = television_dataset_proto.load(repeat=False)[0]
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    snowman_batch, snowman_names = next(snowman_dataset.__iter__())[2:4]
    square_batch, square_names = next(square_dataset.__iter__())[2:4]
    television_batch, television_names = next(television_dataset.__iter__())[2:4]
    fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
    for i, model in enumerate(embedding_models):
        circle_embed = model.embed(circle_batch, training=False)[0]
        snowman_embed = model.embed(snowman_batch, training=False)[0]
        square_embed = model.embed(square_batch, training=False)[0]
        television_embed = model.embed(television_batch, training=False)[0]
        x = tf.concat((circle_embed, snowman_embed, square_embed, television_embed), axis=0)
        y_image = tf.concat((circle_batch, snowman_batch, square_batch, television_batch), axis=0)
        # Alpha channel: transparent where the image is pure white background.
        alphas = y_image == (tf.ones(y_image.shape) * 255.0)
        alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
        y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
        # Recolor each class block (ex_per_class contiguous rows) distinctly.
        colors = [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 1]]
        for k, col_array in enumerate(colors):
            curr_imgs = y_image[k * ex_per_class: (k + 1) * ex_per_class, :, :, :3]
            black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
            y_image[k * ex_per_class: (k + 1) * ex_per_class, :, :, :3] = black_mask * col_array
        # One legend image per class.
        for k in range(4):
            Image.fromarray((y_image[k * ex_per_class] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(k)))
        project_plot(clustering_methods, x, axs, title, model, i, y_image)
    fig.tight_layout()
    fig.savefig(os.path.join(folder, "compositionality.png"))
def relation_count_toy(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Object-count test on synthetic composites of 1-4 circles.

    Builds, for every source circle drawing, four canvases containing one,
    two, three and four copies of the (downscaled) circle, then projects
    the embeddings with each composite type colored distinctly.
    """
    folder = os.path.join(base_dir, "count_toy")
    ex_per_class = 14
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(ex_per_class))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(ex_per_class))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    square_batch, square_names = next(square_dataset.__iter__())[2:4]
    circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
    # Four composites per source drawing, on a white (255) canvas.
    composite_batch = np.ones((circle_batch.shape[0] * 4, *circle_batch.shape[1:])) * 255.0
    for idx in range(circle_batch.shape[0]):
        circle_orig, square_orig = circle_batch[idx], square_batch[idx]
        circle, square = cv2.resize(circle_orig, (14, 14)), cv2.resize(square_orig, (14, 14))
        composite_batch[idx * 4, 7:21, 7:21, :] = circle  # one centered circle
        composite_batch[(idx * 4) + 1, 7:21, :, :] = np.concatenate((circle, circle), axis=1)  # two circles
        composite_batch[(idx * 4) + 2, :, 14:, :] = np.concatenate((circle, circle), axis=0)  # three circles:
        composite_batch[(idx * 4) + 2, 7:21, :14, :] = circle  # two stacked right + one left
        place4((idx * 4) + 3, composite_batch, [circle, circle, circle, circle])  # four circles
    composite_batch = np.array(composite_batch, dtype=np.float32)
    fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
    for i, model in enumerate(embedding_models):
        x = model.embed(composite_batch, training=False)[0]
        y_image = composite_batch
        # Alpha channel: transparent where the canvas stayed pure white.
        alphas = y_image == (tf.ones(y_image.shape) * 255.0)
        alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
        y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
        # Set colors: composite types are interleaved, so type t occupies
        # rows t, t+4, t+8, ...
        colors = [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 1]]
        pre_idx = np.array([k for k in range(0, x.shape[0], len(colors))])
        for k, col_array in enumerate(colors):
            idx = pre_idx + k
            curr_imgs = y_image[idx][:, :, :, :3]
            black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
            y_image[idx, :, :, :3] = black_mask * col_array
        for k in range(4):
            Image.fromarray((y_image[k] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(k)))
        project_plot(clustering_methods, x, axs, title, model, i, y_image)
    fig.tight_layout()
    fig.savefig(os.path.join(folder, "count_toy.png"))
def relation_orientation(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Orientation test: circle/square pairs in four relative arrangements.

    Builds composites with the circle above, left of, below and right of a
    square, then projects the embeddings with each arrangement colored
    distinctly.
    """
    folder = os.path.join(base_dir, "orientation")
    ex_per_class = 10
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(ex_per_class))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(ex_per_class))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    square_batch, square_names = next(square_dataset.__iter__())[2:4]
    circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
    composite_batch = np.ones((circle_batch.shape[0] * 4, *circle_batch.shape[1:])) * 255.0
    for idx in range(circle_batch.shape[0]):
        circle_orig, square_orig = circle_batch[idx], square_batch[idx]
        circle, square = cv2.resize(circle_orig, (14, 14)), cv2.resize(square_orig, (14, 14))
        # Four arrangements: circle over / left of / under / right of square.
        composite_batch[idx * 4, :, 7:21, :] = np.concatenate((circle, square), axis=0)
        composite_batch[(idx * 4) + 1, 7:21, :, :] = np.concatenate((circle, square), axis=1)
        composite_batch[(idx * 4) + 2, :, 7:21, :] = np.concatenate((square, circle), axis=0)
        composite_batch[(idx * 4) + 3, 7:21, :, :] = np.concatenate((square, circle), axis=1)
    composite_batch = np.array(composite_batch, dtype=np.float32)
    fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
    for i, model in enumerate(embedding_models):
        x = model.embed(composite_batch, training=False)[0]
        y_image = composite_batch
        # Alpha channel: transparent where the canvas stayed pure white.
        alphas = y_image == (tf.ones(y_image.shape) * 255.0)
        alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
        y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
        # Set colors: arrangements are interleaved, so type t occupies
        # rows t, t+4, t+8, ...
        colors = [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 1]]
        pre_idx = np.array([k for k in range(0, x.shape[0], len(colors))])
        for k, col_array in enumerate(colors):
            idx = pre_idx + k
            curr_imgs = y_image[idx][:, :, :, :3]
            black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
            y_image[idx, :, :, :3] = black_mask * col_array
        for k in range(4):
            Image.fromarray((y_image[k] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(k)))
        project_plot(clustering_methods, x, axs, title, model, i, y_image)
    fig.tight_layout()
    fig.savefig(os.path.join(folder, "orientation.png"))
def relation_inout(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Containment test: small circle inside a big square, and vice versa.

    The outer shape is re-rasterized from its stroke data at near-full-canvas
    size; the inner shape is a cropped 14x14 raster pasted into the center.
    Embeddings of the two nesting orders are projected with distinct colors.
    """
    folder = os.path.join(base_dir, "inout")
    ex_per_class = 35
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(ex_per_class))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(ex_per_class))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    # Slots 1:4 hold (stroke-five sequences, raster batch, class names).
    circle_strokes, circle_batch, circle_names = next(circle_dataset.__iter__())[1:4]
    square_strokes, square_batch, square_names = next(square_dataset.__iter__())[1:4]
    circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
    composite_batch = np.ones((circle_batch.shape[0] * 2, *circle_batch.shape[1:])) * 255.0
    png_dims = circle_batch[0].shape[-3:-1]
    for idx in range(circle_batch.shape[0]):
        circle_orig, square_orig = circle_batch[idx], square_batch[idx]
        circle, square = cv2.resize(circle_orig, (14, 14)), cv2.resize(square_orig, (14, 14))
        # Rasterize the outer circle from strokes with a 2px margin.
        stroke_five = circle_strokes[idx]
        stroke_three = stroke_three_format(stroke_five)
        scaled_and_centered_stroke_three = scale_and_center_stroke_three(stroke_three, circle_batch[0].shape[-3:-1], 2)
        rasterized_image = rasterize(scaled_and_centered_stroke_three, png_dims)
        big_circle = rasterized_image
        # Same for the outer square.
        stroke_five = square_strokes[idx]
        stroke_three = stroke_three_format(stroke_five)
        scaled_and_centered_stroke_three = scale_and_center_stroke_three(stroke_three, circle_batch[0].shape[-3:-1], 2)
        rasterized_image = rasterize(scaled_and_centered_stroke_three, png_dims)
        big_square = rasterized_image
        # Even rows: circle inside square; odd rows: square inside circle.
        composite_batch[(idx * 2), :, :, :] = big_square
        composite_batch[(idx * 2), 8:20, 8:20, :] = circle[1:13, 1:13, :]
        composite_batch[(idx * 2) + 1, :, :, :] = big_circle
        composite_batch[(idx * 2) + 1, 8:20, 8:20, :] = square[1:13, 1:13, :]
    composite_batch = np.array(composite_batch, dtype=np.float32)
    fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
    for i, model in enumerate(embedding_models):
        x = model.embed(composite_batch, training=False)[0]
        y_image = composite_batch
        # Alpha channel: transparent where the canvas stayed pure white.
        alphas = y_image == (tf.ones(y_image.shape) * 255.0)
        alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
        y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
        # Set colors: the two nesting orders alternate row by row.
        colors = [[1, 0, 0], [0, 1, 0]]
        pre_idx = np.array([k for k in range(0, x.shape[0], len(colors))])
        for k, col_array in enumerate(colors):
            idx = pre_idx + k
            curr_imgs = y_image[idx][:, :, :, :3]
            black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
            y_image[idx, :, :, :3] = black_mask * col_array
        for k in range(2):
            Image.fromarray((y_image[k] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(k)))
        project_plot(clustering_methods, x, axs, title, model, i, y_image)
    fig.tight_layout()
    fig.savefig(os.path.join(folder, "inout.png"))
def relation_four(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Four-shape arrangement test.

    Places two circles and two squares in the four quadrants (via ``place4``)
    in four rotated arrangements, then projects the embeddings with each
    arrangement colored distinctly.
    """
    folder = os.path.join(base_dir, "four")
    ex_per_class = 15
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(ex_per_class))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(ex_per_class))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    square_batch, square_names = next(square_dataset.__iter__())[2:4]
    circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
    composite_batch = np.ones((circle_batch.shape[0] * 4, *circle_batch.shape[1:])) * 255.0
    for idx in range(circle_batch.shape[0]):
        circle_orig, square_orig = circle_batch[idx], square_batch[idx]
        circle, square = cv2.resize(circle_orig, (14, 14)), cv2.resize(square_orig, (14, 14))
        # Four rotations of the circle/circle/square/square layout.
        place4((idx * 4), composite_batch, [circle, circle, square, square])
        place4((idx * 4) + 1, composite_batch, [square, circle, circle, square])
        place4((idx * 4) + 2, composite_batch, [square, square, circle, circle])
        place4((idx * 4) + 3, composite_batch, [circle, square, square, circle])
    composite_batch = np.array(composite_batch, dtype=np.float32)
    fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
    for i, model in enumerate(embedding_models):
        x = model.embed(composite_batch, training=False)[0]
        y_image = composite_batch
        # Alpha channel: transparent where the canvas stayed pure white.
        alphas = y_image == (tf.ones(y_image.shape) * 255.0)
        alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
        y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
        # Set colors: arrangements are interleaved, so type t occupies
        # rows t, t+4, t+8, ...
        colors = [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 1]]
        pre_idx = np.array([k for k in range(0, x.shape[0], len(colors))])
        for k, col_array in enumerate(colors):
            idx = pre_idx + k
            curr_imgs = y_image[idx][:, :, :, :3]
            black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
            y_image[idx, :, :, :3] = black_mask * col_array
        for k in range(4):
            Image.fromarray((y_image[k] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(k)))
        project_plot(clustering_methods, x, axs, title, model, i, y_image)
    fig.tight_layout()
    fig.savefig(os.path.join(folder, "four.png"))
def relation_four_readout(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Linear readout of the four-shape arrangement class from embeddings.

    Builds composites in one of four circle/square quadrant arrangements,
    fits a logistic regression on the first 100 embeddings per model, and
    logs test accuracy on the remaining examples.
    """
    folder = os.path.join(base_dir, "four_readout")
    batch_size = 300
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(batch_size))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(batch_size))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    square_batch, square_names = next(square_dataset.__iter__())[2:4]
    circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
    composite_batch = np.ones((batch_size, *circle_batch.shape[1:])) * 255.0
    y_class = np.zeros((batch_size, ))
    for idx in range(batch_size):
        circle_orig, square_orig = circle_batch[idx], square_batch[idx]
        circle, square = cv2.resize(circle_orig, (14, 14)), cv2.resize(square_orig, (14, 14))
        # Each class label is one rotation of the 2-circle/2-square layout.
        class_label = np.random.randint(0, 4)
        if class_label == 0:
            place4(idx, composite_batch, [circle, circle, square, square])
        elif class_label == 1:
            place4(idx, composite_batch, [square, circle, circle, square])
        elif class_label == 2:
            place4(idx, composite_batch, [square, square, circle, circle])
        elif class_label == 3:
            place4(idx, composite_batch, [circle, square, square, circle])
        y_class[idx] = class_label
    composite_batch = np.array(composite_batch, dtype=np.float32)
    for i, model in enumerate(embedding_models):
        # Fresh classifier per model (matches relation_nest_readout and
        # avoids any fitted-state carry-over between models).
        reg_model = linear_model.LogisticRegression()
        x = model.embed(composite_batch.astype(np.float32), training=False)[0]
        reg_model.fit(x[:100], y_class[:100])
        logging.info("%s, %f", model.__class__.__name__, reg_model.score(x[100:], y_class[100:]))
def relation_nest_readout(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Linear readout of nesting order (square-in-circle vs circle-in-square).

    Builds nested composites with a random nesting order, fits a logistic
    regression on the first 100 embeddings per model, and logs test accuracy
    on the remaining examples.
    """
    folder = os.path.join(base_dir, "nest_readout")
    batch_size = 300
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(batch_size))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(batch_size))
    square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
    square_dataset = square_dataset_proto.load(repeat=False)[0]
    # Slots 1:4 hold (stroke-five sequences, raster batch, class names).
    circle_strokes, circle_batch, circle_names = next(circle_dataset.__iter__())[1:4]
    square_strokes, square_batch, square_names = next(square_dataset.__iter__())[1:4]
    circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
    composite_batch = np.ones((batch_size, *circle_batch.shape[1:])) * 255.0
    y_class = np.zeros((batch_size, ))
    png_dims = circle_batch[0].shape[-3:-1]
    for idx in range(batch_size):
        circle_orig, square_orig = circle_batch[idx], square_batch[idx]
        circle, square = cv2.resize(circle_orig, (14, 14)), cv2.resize(square_orig, (14, 14))
        # Rasterize the outer circle from strokes with a 2px margin.
        stroke_five = circle_strokes[idx]
        stroke_three = stroke_three_format(stroke_five)
        scaled_and_centered_stroke_three = scale_and_center_stroke_three(stroke_three, circle_batch[0].shape[-3:-1], 2)
        rasterized_image = rasterize(scaled_and_centered_stroke_three, png_dims)
        big_circle = rasterized_image
        # Same for the outer square.
        stroke_five = square_strokes[idx]
        stroke_three = stroke_three_format(stroke_five)
        scaled_and_centered_stroke_three = scale_and_center_stroke_three(stroke_three, circle_batch[0].shape[-3:-1], 2)
        rasterized_image = rasterize(scaled_and_centered_stroke_three, png_dims)
        big_square = rasterized_image
        # Class 0: square inside circle; class 1: circle inside square.
        class_label = np.random.randint(0, 2)
        if class_label == 0:
            composite_batch[idx, :, :, :] = big_circle
            composite_batch[idx, 8:20, 8:20, :] = square[1:13, 1:13, :]
        elif class_label == 1:
            composite_batch[idx, :, :, :] = big_square
            composite_batch[idx, 8:20, 8:20, :] = circle[1:13, 1:13, :]
        y_class[idx] = class_label
    composite_batch = np.array(composite_batch, dtype=np.float32)
    for i, model in enumerate(embedding_models):
        reg_model = linear_model.LogisticRegression()
        x = model.embed(composite_batch.astype(np.float32), training=False)[0]
        # Train on the first 100 embeddings, score on the rest.
        reg_model.fit(x[:100], y_class[:100])
        logging.info("%s, %f", model.__class__.__name__, reg_model.score(x[100:], y_class[100:]))
def latent_distance_cts(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Continuous-distance test: two circles at varying horizontal separation.

    One circle is fixed at the left; the second is shifted right by a random
    0-6 pixel offset. Each composite is colored on a blue(near)-red(far)
    ramp and the embeddings are projected with ``project_plot``.
    """
    folder = os.path.join(base_dir, "distance")
    batch_size = 300
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(batch_size))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    circle_batch = circle_batch.numpy()
    composite_batch = np.ones((circle_batch.shape[0], *circle_batch.shape[1:])) * 255.0
    y_dist = np.zeros((batch_size, 1))
    for idx in range(batch_size):
        circle_orig = circle_batch[idx]
        circle = cv2.resize(circle_orig, (12, 12))
        distance = np.random.randint(0, 7)
        y_dist[idx] = distance
        composite_batch[idx, 8:20, :11, :] = circle[:, 1:]  # fixed left circle
        composite_batch[idx, 8:20, 16 + distance - 6:16 + distance + 6, :] = circle  # shifted right circle
    composite_batch = np.array(composite_batch, dtype=np.float32)
    fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
    for i, model in enumerate(embedding_models):
        x = model.embed(composite_batch, training=False)[0]
        y_image = composite_batch
        # Alpha channel: transparent where the canvas stayed pure white.
        alphas = y_image == (tf.ones(y_image.shape) * 255.0)
        alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
        y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
        # Interpolate red<->blue by normalized separation.
        for idx in range(batch_size):
            dist = y_dist[idx]
            color = [dist / 6.0, 0, 1 - (dist / 6.0)]
            curr_imgs = y_image[idx, :, :, :3]
            black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
            y_image[idx, :, :, :3] = black_mask * color
        Image.fromarray((y_image[np.argmax(y_dist)] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(np.argmax(y_dist))))
        Image.fromarray((y_image[np.argmin(y_dist)] * 255.0).astype('uint8')).save(
            os.path.join(folder, "legend-{}.png".format(np.argmin(y_dist))))
        project_plot(clustering_methods, x, axs, title, model, i, y_image)
        # Loop variable renamed from `i` so it no longer shadows the
        # enclosing model-loop index.
        for k in range(10):
            Image.fromarray((y_image[k] * 255.0).astype('uint8')).save(
                os.path.join(folder, "legend-{}.png".format(y_dist[k])))
    fig.tight_layout()
    fig.savefig(os.path.join(folder, "distance.png"))
def latent_angle_cts(embedding_models, clustering_methods, base_dir="compositionality", title=True):
    """Continuous-angle test: second circle placed at a varying bearing.

    One circle is fixed at the left; the second is placed at a random angle
    in [-0.52, 0.52] radians relative to it. Each composite is colored on a
    blue-red ramp by angle and the embeddings are projected.
    """
    folder = os.path.join(base_dir, "angle")
    batch_size = 400
    os.makedirs(folder, exist_ok=True)
    circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(batch_size))
    circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
    circle_dataset = circle_dataset_proto.load(repeat=False)[0]
    circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
    circle_batch = circle_batch.numpy()
    composite_batch = np.ones((circle_batch.shape[0], *circle_batch.shape[1:])) * 255.0
    y_angles = np.zeros((batch_size, 1))
    for idx in range(circle_batch.shape[0]):
        circle_orig = circle_batch[idx]
        circle = cv2.resize(circle_orig, (12, 12))
        angle = np.random.uniform(-0.52, 0.52)  # -30 to 30 degrees, the limit of our canvas
        x_offs, y_offs = np.rint(14 * np.cos(angle)).astype(np.int32), np.rint(14 * np.sin(angle)).astype(np.int32)
        composite_batch[idx, 8:20, :11, :] = circle[:, 1:]  # fixed left circle
        composite_batch[idx, 14 - y_offs - 6:14 - y_offs + 6, 7 + x_offs - 6:7 + x_offs + 6, :] = circle
        y_angles[idx, 0] = np.round(angle, decimals=2)
    composite_batch = np.array(composite_batch, dtype=np.float32)
    fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
    for i, model in enumerate(embedding_models):
        x = model.embed(composite_batch, training=False)[0]
        y_image = composite_batch
        # Alpha channel: transparent where the canvas stayed pure white.
        alphas = y_image == (tf.ones(y_image.shape) * 255.0)
        alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
        y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
        # Set colors: red<->blue interpolated by normalized angle in [0, 1.04].
        for idx in range(batch_size):
            angle = y_angles[idx] + 0.52
            color = [angle / 1.04, 0, 1 - (angle / 1.04)]
            curr_imgs = y_image[idx, :, :, :3]
            black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
            y_image[idx, :, :, :3] = black_mask * color
        Image.fromarray((y_image[np.argmax(y_angles)] * 255.0).astype('uint8')).save(
            os.path.join(folder, "legend-{}.png".format(np.argmax(y_angles))))
        Image.fromarray((y_image[np.argmin(y_angles)] * 255.0).astype('uint8')).save(
            os.path.join(folder, "legend-{}.png".format(np.argmin(y_angles))))
        project_plot(clustering_methods, x, axs, title, model, i, y_image)
        # Loop variable renamed from `i` so it no longer shadows the
        # enclosing model-loop index.
        for k in range(10):
            Image.fromarray((y_image[k] * 255.0).astype('uint8')).save(
                os.path.join(folder, "legend-{}.png".format(y_angles[k])))
    fig.tight_layout()
    fig.savefig(os.path.join(folder, "angle.png"))
def latent_size_cts(embedding_models, clustering_methods, base_dir="compositionality", title=True):
folder = os.path.join(base_dir, "size")
batch_size = 500
os.makedirs(folder, exist_ok=True)
circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=True,batch_size={}".format(batch_size))
circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
circle_dataset = circle_dataset_proto.load(repeat=False)[0]
iter = circle_dataset.__iter__()
circle_strokes, circle_batch, circle_names = next(iter)[1:4]
circle_batch = circle_batch.numpy()
composite_batch = np.ones((circle_batch.shape[0], *circle_batch.shape[1:])) * 255.0
y_size = np.zeros((batch_size, 1))
png_dims = circle_batch[0].shape[-3:-1]
for idx in range(batch_size):
circle_orig = circle_batch[idx]
size = np.random.randint(4, 22)
stroke_five = circle_strokes[idx]
stroke_three = stroke_three_format(stroke_five)
scaled_and_centered_stroke_three = scale_and_center_stroke_three(stroke_three, png_dims, 28-size)
rasterized_image = rasterize(scaled_and_centered_stroke_three, png_dims)
circle = rasterized_image
y_size[idx] = size
composite_batch[idx] = circle
composite_batch = np.array(composite_batch, dtype=np.float32)
fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
for i, model in enumerate(embedding_models):
x = model.embed(composite_batch, training=False)[0]
y_image = composite_batch
alphas = y_image == (tf.ones(y_image.shape) * 255.0)
alphas = 1 - tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
y_image = tf.concat((y_image / 255.0, alphas), axis=-1).numpy()
for idx in range(batch_size):
size = y_size[idx]
color = [(size-4)/18, 0, 1-((size-4)/18)]
curr_imgs = y_image[idx, :, :, :3]
black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.12), axis=-1, keepdims=True)
y_image[idx, :, :, :3] = black_mask * color
Image.fromarray((y_image[np.argmax(y_size)] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(np.argmax(y_size))))
Image.fromarray((y_image[np.argmin(y_size)] * 255.0).astype('uint8')).save(
os.path.join(folder, "legend-{}.png".format(np.argmin(y_size))))
project_plot(clustering_methods, x, axs, title, model, i, y_image)
for i in range(10):
Image.fromarray((y_image[i] * 255.0).astype('uint8')).save(
os.path.join(folder, "legend-{}.png".format(y_size[i])))
fig.tight_layout()
fig.savefig(os.path.join(folder, "size.png"))
def latent_count_readout(embedding_models, clustering_methods, base_dir="compositionality", title=True):
folder = os.path.join(base_dir, "count_readout")
batch_size = 300
os.makedirs(folder, exist_ok=True)
snowman_config: HParams = configs.get_config("quickdraw")().parse("split=snowman,shuffle=True,batch_size={}".format(batch_size))
snowman_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, snowman_config)
snowman_dataset = snowman_dataset_proto.load(repeat=False)[0]
circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=True,batch_size={}".format(batch_size))
circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
circle_dataset = circle_dataset_proto.load(repeat=False)[0]
square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=True,batch_size={}".format(batch_size))
square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
square_dataset = square_dataset_proto.load(repeat=False)[0]
television_config = configs.get_config("quickdraw")().parse("split=television,shuffle=True,batch_size={}".format(batch_size))
television_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, television_config)
television_dataset = television_dataset_proto.load(repeat=False)[0]
circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
snowman_batch, snowman_names = next(snowman_dataset.__iter__())[2:4]
square_batch, square_names = next(square_dataset.__iter__())[2:4]
television_batch, television_names = next(television_dataset.__iter__())[2:4]
circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
composite_batch = np.ones((batch_size, *circle_batch.shape[1:])) * 255.0
y_class = np.zeros((batch_size, ))
for idx in range(batch_size):
circle_orig, square_orig = circle_batch[idx], square_batch[idx]
snowman_orig, television_orig = snowman_batch[idx], television_batch[idx]
class_label = np.random.randint(0, 4)
if class_label == 0:
composite_batch[idx] = circle_orig
elif class_label == 1:
composite_batch[idx] = square_orig
elif class_label == 2:
composite_batch[idx] = snowman_orig
elif class_label == 3:
composite_batch[idx] = television_orig
y_class[idx] = class_label
composite_batch = np.array(composite_batch, dtype=np.float32)
for i, model in enumerate(embedding_models):
reg_model = linear_model.Ridge()
reg_model = MLPRegressor(max_iter=2000)
x = model.embed(composite_batch.astype(np.float32), training=False)[0]
reg_model.fit(x[:100], y_class[:100])
logging.info("%s, %f", model.__class__.__name__, reg_model.score(x[100:], y_class[100:]))
def latent_angle_readout(embedding_models, clustering_methods, base_dir):
logging.info("---Angle Readout---")
folder = os.path.join(base_dir, "angle_readout", "angled")
num_train_ex = 100
num_test_ex = 2000
os.makedirs(folder, exist_ok=True)
circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(num_train_ex + num_test_ex))
circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
circle_dataset = circle_dataset_proto.load(repeat=False)[0]
circle_batch = next(circle_dataset.__iter__())[2].numpy()
composed_image_batch = np.ones((num_train_ex + num_test_ex, *circle_batch.shape[1:])) * 255.0
y_angles = np.zeros((num_train_ex + num_test_ex, 1))
for i in range(num_train_ex + num_test_ex):
circle_orig = circle_batch[i]
circle = cv2.resize(circle_orig, (12, 12))
composed_image_batch[i, 8:20, 1:13, :] = circle
angle = np.random.uniform(-0.52, 0.52) # -30 to 30 degrees, the limit of our canvas
x_offs, y_offs = np.rint(14*np.cos(angle)).astype(np.int32), np.rint(14*np.sin(angle)).astype(np.int32)
composed_image_batch[i, 14-y_offs-6:14-y_offs+6, 7+x_offs-6:7+x_offs+6, :] = circle
y_angles[i, 0] = np.round(angle, decimals=2)
for i, model in enumerate(embedding_models):
lin_readout(model, composed_image_batch, y_angles, num_train_ex)
def latent_distance_readout(embedding_models, clustering_methods, base_dir):
logging.info("---Distance Readout---")
folder = os.path.join(base_dir, "distance_readout")
num_train_ex = 100
num_test_ex = 2000
os.makedirs(folder, exist_ok=True)
circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(num_train_ex + num_test_ex))
circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
circle_dataset = circle_dataset_proto.load(repeat=False)[0]
circle_batch = next(circle_dataset.__iter__())[2].numpy()
composed_image_batch = np.ones((num_train_ex + num_test_ex, *circle_batch.shape[1:])) * 255.0
y_distance = np.zeros((num_train_ex + num_test_ex, 1))
for i in range(num_train_ex + num_test_ex):
circle_orig = circle_batch[i]
circle = cv2.resize(circle_orig, (12, 12))
circle -= (circle < 200).astype(np.float32) * 120
circle = np.maximum(np.zeros(circle.shape), circle)
composed_image_batch[i, 8:20, :10, :] = circle[:, 2:]
distance = np.random.uniform(0.0, 6) # -30 to 30 degrees, the limit of our canvas
composed_image_batch[i, 8:20, np.rint(16+distance-6).astype(np.int32):np.rint(16+distance+6).astype(np.int32), :] = circle
y_distance[i, 0] = np.round(distance, decimals=0)
for i, model in enumerate(embedding_models):
lin_readout(model, composed_image_batch, y_distance, num_train_ex)
def latent_size_readout(embedding_models, clustering_methods, base_dir):
logging.info("---Size Readout---")
folder = os.path.join(base_dir, "size_readout")
num_train_ex = 100
num_test_ex = 2000
os.makedirs(folder, exist_ok=True)
circle_config: HParams = configs.get_config("quickdraw")().parse(
"split=circle,shuffle=False,batch_size={}".format(num_train_ex + num_test_ex))
circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
circle_dataset = circle_dataset_proto.load(repeat=False)[0]
iter = circle_dataset.__iter__()
circle_strokes, circle_batch, circle_names = next(iter)[1:4]
circle_batch = circle_batch.numpy()
composed_image_batch = np.ones((num_train_ex + num_test_ex, *circle_batch.shape[1:])) * 255.0
y_size = np.zeros((num_train_ex + num_test_ex, 1))
png_dims = circle_batch[0].shape[-3:-1]
for idx in range(num_train_ex + num_test_ex):
size = np.random.randint(4, 22)
stroke_five = circle_strokes[idx]
stroke_three = stroke_three_format(stroke_five)
scaled_and_centered_stroke_three = scale_and_center_stroke_three(stroke_three, png_dims, 28-size)
rasterized_image = rasterize(scaled_and_centered_stroke_three, png_dims)
circle = rasterized_image
y_size[idx] = size
composed_image_batch[idx] = circle
for i, model in enumerate(embedding_models):
lin_readout(model, composed_image_batch, y_size, num_train_ex)
def n_interpolate(embedding_models, clustering_methods, base_dir):
folder = os.path.join(base_dir, "n_interpolate")
batch_size = 20
os.makedirs(folder, exist_ok=True)
circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(batch_size))
circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
circle_dataset = circle_dataset_proto.load(repeat=False)[0]
circle_batch = next(circle_dataset.__iter__())[2].numpy()
composed_image_batch = np.ones((batch_size * 3, *circle_batch.shape[1:])) * 255.0
for i in range(batch_size):
circle_orig = circle_batch[i]
circle = cv2.resize(circle_orig, (14, 14))
shp = circle_orig.shape
composed_image_batch[3*i, 7:21, 7:21, :] = circle
composed_image_batch[3*i] -= (composed_image_batch[3*i] < 230).astype(np.float32) * 100
composed_image_batch[3 * i] -= (composed_image_batch[3 * i] < 230).astype(np.float32) * 100
composed_image_batch[3*i] = np.maximum(np.zeros(shp), composed_image_batch[3*i])
composed_image_batch[3*i+1, 1:15, 7:21, :] = circle
composed_image_batch[3*i+1, 13:27, 7:21, :] = circle
composed_image_batch[3*i+1] -= (composed_image_batch[3*i+1] < 200).astype(np.float32)* 150
composed_image_batch[3*i+1] = np.maximum(np.zeros(shp), composed_image_batch[3*i+1])
composed_image_batch[3*i+2, 1:15, 7:21, :] = circle
composed_image_batch[3*i+2, 13:27, 1:15, :] = circle
composed_image_batch[3*i+2, 13:27, 13:27, :] = circle
composed_image_batch[3*i+2] -= (composed_image_batch[3 * i + 2] < 200).astype(np.float32)* 150
composed_image_batch[3*i+2] = np.maximum(np.zeros(shp), composed_image_batch[3 * i + 2])
def interpolate3(x, y, z):
res = np.zeros((21, *x.shape))
for idx, w in enumerate(np.linspace(0.1, 0.9, 9)):
res[idx + 1] = ((1.-w)*x) + (w*y)
for idx, w in enumerate(np.linspace(0.1, 0.9, 9)):
res[idx + 11] = ((1.-w)*y) + (w*z)
res[0] = x
res[10] = y
res[20] = z
return res
for i, model in enumerate(embedding_models):
x = model.embed(composed_image_batch.astype(np.float32), training=False)[0]
for j in range(batch_size):
two, three, four = x[3*j], x[3*j+1], x[3*j+2]
decode_batch = interpolate3(two, three, four)
if "Drawer" in model.__class__.__name__:
decodes = model.decode(decode_batch.astype(np.float32), training=False, generation_length=64)[1]
lst = []
for decode in decodes:
lst.append(scale_and_rasterize(stroke_three_format(decode), png_dimensions=(28, 28), stroke_width=1))
gen_inter_image = np.array(lst).astype(np.float)
name = "drawer_{}.png".format(j)
elif "VAE" in model.__class__.__name__:
gen_inter_image = tf.image.resize(model.decode(decode_batch.astype(np.float32), training=False), (28, 28)) * 255.0
name = "vae_{}.png".format(j)
else:
logging.fatal("Error, wrong embedding model")
stitched_interpolation = np.hstack(gen_inter_image)
Image.fromarray(stitched_interpolation.astype(np.uint8)).save(os.path.join(folder, name))
stitched_original = np.hstack(composed_image_batch[3*j:3*j + 3])
Image.fromarray(stitched_original.astype(np.uint8)).save(os.path.join(folder, "orig"+name))
def four_rotate(embedding_models, clustering_methods, base_dir="compositionality", title=True):
folder = os.path.join(base_dir, "four_rotate")
ex_per_class = 25
os.makedirs(folder, exist_ok=True)
circle_config: HParams = configs.get_config("quickdraw")().parse("split=circle,shuffle=False,batch_size={}".format(ex_per_class))
circle_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, circle_config)
circle_dataset = circle_dataset_proto.load(repeat=False)[0]
square_config: HParams = configs.get_config("quickdraw")().parse("split=square,shuffle=False,batch_size={}".format(ex_per_class))
square_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, square_config)
square_dataset = square_dataset_proto.load(repeat=False)[0]
circle_batch, circle_names = next(circle_dataset.__iter__())[2:4]
square_batch, square_names = next(square_dataset.__iter__())[2:4]
circle_batch, square_batch = circle_batch.numpy(), square_batch.numpy()
composite_batch = np.ones((circle_batch.shape[0] * 4, *circle_batch.shape[1:])) * 255.0
for idx in range(circle_batch.shape[0]):
circle_orig, square_orig = circle_batch[idx], square_batch[idx]
circle, square = cv2.resize(circle_orig, (14, 14)), cv2.resize(square_orig, (14, 14))
circle -= (circle < 200).astype(np.float32) * 100
circle = np.maximum(np.zeros(circle.shape), circle)
square -= (square < 200).astype(np.float32) * 100
square = np.maximum(np.zeros(square.shape), square)
place4((idx*4), composite_batch, [circle, circle, circle, square])
place4((idx*4)+1, composite_batch, [square, circle, circle, circle])
place4((idx*4)+2, composite_batch, [circle, square, circle, circle])
place4((idx*4)+3, composite_batch, [circle, circle, square, circle])
composite_batch = np.array(composite_batch, dtype=np.float32)
fig, axs = plt.subplots(len(embedding_models), len(clustering_methods), figsize=(10 * len(clustering_methods), 10 * len(embedding_models)))
for i, model in enumerate(embedding_models):
x = model.embed(composite_batch, training=False)[0]
y_image = composite_batch
alphas = y_image == (tf.ones(y_image.shape) * 255.0)
alphas = 1-tf.cast(tf.reduce_all(alphas, axis=-1, keepdims=True), dtype=tf.float32)
y_image = tf.concat((y_image/255.0, alphas), axis=-1).numpy()
# Set colors
colors = [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 1]] #, [1, 0, 1], [0, 1, 1]]
pre_idx = np.array([k for k in range(0, x.shape[0], len(colors))])
for k, col_array in enumerate(colors):
idx = pre_idx + k
curr_imgs = y_image[idx][:, :, :, :3]
black_mask = np.all((curr_imgs - tf.zeros(curr_imgs.shape) > 0.08), axis=-1, keepdims=True)
y_image[idx, :, :, :3] = black_mask * col_array
for k in range(4):
Image.fromarray((y_image[k] * 255.0).astype('uint8')).save(os.path.join(folder, "legend-{}.png".format(k)))
project_plot(clustering_methods, x, axs, title, model, i, y_image)
fig.tight_layout()
fig.savefig(os.path.join(folder, "four.png"))
def place4(idx, source, shapes):
source[idx, 0:14, 0:14, :] = shapes[0]
source[idx, 0:14, 14:, :] = shapes[1]
source[idx, 14:, 0:14, :] = shapes[2]
source[idx, 14:, 14:, :] = shapes[3]
def project_plot(clustering_methods, x, axs, title, model, i, y_image):
for j, method in enumerate(clustering_methods):
x_2d = method.fit_transform(x)
if len(clustering_methods) == 1:
ax = axs[i]
else:
ax = axs[i][j]
ax.set_xticks([])
ax.set_yticks([])
ax.scatter(x_2d[:, 0], x_2d[:, 1], facecolors='none', edgecolors='none')
for k, img in enumerate(y_image):
ab = AnnotationBbox(OffsetImage(img), (x_2d[k, 0], x_2d[k, 1]), frameon=False)
ax.add_artist(ab)
if title:
ax.set_title("{}".format("SketchEmbedding" if model.__class__.__name__ == "DrawerEncTADAMModel" else model.__class__.__name__),
fontsize=50)
else:
ax.set_title(" ", fontsize=50)
def lin_readout(model, composed_image_batch, y, num_train_ex):
reg_model_nl = MLPRegressor(max_iter=3000)
reg_model_lin = linear_model.Ridge()
x = model.embed(composed_image_batch.astype(np.float32), training=False)[0]
reg_model_nl.fit(x[:num_train_ex], y[:num_train_ex, 0])
reg_model_lin.fit(x[:num_train_ex], y[:num_train_ex, 0])
logging.info("%s, linear:(%f, %f), nonlinear:(%f, %f)",
model.__class__.__name__,
reg_model_lin.score(x[num_train_ex:], y[num_train_ex:, 0]),
sklearn.metrics.mean_squared_error(y[num_train_ex:, 0], reg_model_lin.predict(x[num_train_ex:])),
reg_model_nl.score(x[num_train_ex:], y[num_train_ex:, 0]),
sklearn.metrics.mean_squared_error(y[num_train_ex:, 0], reg_model_nl.predict(x[num_train_ex:])),)
def main(argv):
"""Create directories and configure python settings"""
# Setup Directory
experiment_dir = os.path.join(FLAGS.dir, FLAGS.id)
if not os.path.exists(experiment_dir):
os.makedirs(os.path.join(experiment_dir, "logs"), exist_ok=True)
# Setup Logging
FLAGS.alsologtostderr = True
logging.get_absl_handler().use_absl_log_file(FLAGS.logfile, os.path.join(experiment_dir, "logs"))
# Setup seeds
if FLAGS.random_seed:
np.random.seed(FLAGS.random_seed)
tf.random.set_seed(FLAGS.random_seed)
# Log Flags
log_flags(FLAGS)
drawer_id = FLAGS.drawer_id
drawer_config: HParams = configs.get_config(FLAGS.drawer_cfgset)().parse(FLAGS.drawer_cfgs)
drawer: DrawerModel = models.get_model(FLAGS.drawer_model)(FLAGS.dir, drawer_id, drawer_config, training=False)
vae_id = FLAGS.vae_id
print(vae_id)
vae_config: HParams = configs.get_config(FLAGS.vae_cfgset)().parse(FLAGS.vae_cfgs)
vae: VAE = models.get_model(FLAGS.vae_model)(FLAGS.dir, vae_id, vae_config, training=False)
embedding_models = [drawer, vae]
clustering_methods = [umap.UMAP()]
try:
if FLAGS.conceptual_composition:
conceptual_composition(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_count:
relation_count(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_count_toy:
relation_count_toy(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_orient:
relation_orientation(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_inout:
relation_inout(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_four:
relation_four(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_count_readout:
latent_count_readout(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_four_readout:
relation_four_readout(embedding_models, clustering_methods, experiment_dir)
if FLAGS.relation_nest_readout:
relation_nest_readout(embedding_models, clustering_methods, experiment_dir)
if FLAGS.latent_distance:
latent_distance_cts(embedding_models, clustering_methods, experiment_dir)
if FLAGS.latent_angle:
latent_angle_cts(embedding_models, clustering_methods, experiment_dir)
if FLAGS.latent_size:
latent_size_cts(embedding_models, clustering_methods, experiment_dir)
if FLAGS.latent_angle_readout:
latent_angle_readout(embedding_models, clustering_methods, experiment_dir)
if FLAGS.latent_distance_readout:
latent_distance_readout(embedding_models, clustering_methods, experiment_dir)
if FLAGS.latent_size_readout:
latent_size_readout(embedding_models, clustering_methods, experiment_dir)
if FLAGS.n_interpolate:
n_interpolate(embedding_models, clustering_methods, experiment_dir)
if FLAGS.four_rotate:
four_rotate(embedding_models, clustering_methods, experiment_dir)
except:
exception = traceback.format_exc()
logging.info(exception)
if __name__ == "__main__":
app.run(main)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,369
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/vae.py
|
import os
from time import time
import tensorflow as tf
from absl import logging
from models.base import TrainableModel, register_model
from models.subs.conv_block import ResNet12Block, ResNet12BlockReverse
from util import process_write_out
from util.write_routines import parallel_writer_vae_latent
@register_model("vae")
class VAE(TrainableModel):
def __init__(self, base_dir, model_id, params, training=True):
"""
Pixel-domain VAE model as a benchmark comparison for embedding learning purposes
:param base_dir:
:param model_id:
:param params:
:param training:
"""
self._latent_size = params.latent_size
self._png_dim = params.png_dim
self._grayscale = params.grayscale
self._kl_weight = params.kl_weight
self._kl_tolerance = params.kl_tolerance
self._filters = params.filters
self._lr = params.lr
self._lr_decay_step = params.lr_decay_step
self._lr_decay_rate = params.lr_decay_rate
super(VAE, self).__init__(base_dir, model_id, training)
def _build_model(self):
self._conv_encoder = tf.keras.Sequential([
ResNet12Block(self._filters[0]),
ResNet12Block(self._filters[1]),
ResNet12Block(self._filters[2]),
ResNet12Block(self._filters[3]),
tf.keras.layers.GlobalAveragePooling2D()
])
self._mu = tf.keras.layers.Dense(self._latent_size)
self._var = tf.keras.layers.Dense(self._latent_size)
self._conv_decoder = tf.keras.Sequential([
tf.keras.layers.UpSampling2D((self._png_dim//16, self._png_dim//16)),
ResNet12BlockReverse(self._filters[2]),
ResNet12BlockReverse(self._filters[1]),
ResNet12BlockReverse(self._filters[0]),
ResNet12BlockReverse(1 if self._grayscale else 3)
])
lr = tf.keras.optimizers.schedules.ExponentialDecay(self._lr, self._lr_decay_step, self._lr_decay_rate)
self._optimizer = tf.optimizers.Adam(learning_rate=lr)
def _checkpoint_model(self):
ckpt = tf.train.Checkpoint(optimizer=self._optimizer,
conv_encoder=self._conv_encoder,
mean=self._mu,
var=self._var,
conv_decoder=self._conv_decoder)
self._ckpt_manager = tf.train.CheckpointManager(ckpt, self._checkpoint_dir, max_to_keep=None)
if self._ckpt_manager.latest_checkpoint:
logging.info("Restoring Checkpoint: %s", self._ckpt_manager.latest_checkpoint)
status = ckpt.restore(self._ckpt_manager.latest_checkpoint)
if self.training:
status.assert_existing_objects_matched()
else:
status.expect_partial()
def train(self, train_dataset, train_steps, print_freq, save_freq, eval_dataset=None, eval_freq=None):
train_dataset: tf.data.Dataset = train_dataset[0]
train_iter = train_dataset.__iter__()
if eval_dataset and not eval_freq:
eval_freq = save_freq
logging.info("Beginning training loop")
last_time = start_time = time()
for step in tf.range(self._optimizer.iterations + 1, tf.constant(train_steps + 1)):
entry = next(train_iter)
if len(entry) == 2:
x_image = entry[0]
else:
x_image = next(train_iter)[2]
x_image = tf.image.rgb_to_grayscale(x_image / 255.0) if self._grayscale else x_image / 255.0
loss, reconstruction_loss, kl_loss = self.train_step(x_image)
if step and step % print_freq == 0:
curr_time = time()
logging.info("Step: %6d | Loss: %6f | Recons. Loss: %6f | KL Loss: %6f | LR: %.5f | time/step: %.4f | Total Time: %7d",
step, loss, reconstruction_loss, kl_loss, self._optimizer._decayed_lr('float32'),
(curr_time-last_time)/print_freq, curr_time-start_time)
last_time = curr_time
with self._writer.as_default():
self._write_summaries(step, {"cost": loss, "reconstruction_cost": reconstruction_loss, "kl_cost": kl_loss})
if step and step % save_freq == 0:
self._ckpt_manager.save(step)
if eval_dataset and step and step % eval_freq == 0:
eval_start_time = time()
eval_cost, eval_reconstruction, eval_kl = self.evaluate(eval_dataset)
with self._writer.as_default():
self._write_summaries(step, {"eval_cost": eval_cost,
"eval_reconstruction": eval_reconstruction, "eval_kl": eval_kl})
last_time = time()
logging.info("Eval Done | Loss: %.5f | Recons. Loss: %6f | KL Loss: %6f | Eval Time: %.4f | Total Time: %.0f",
eval_cost, eval_reconstruction, eval_kl, last_time-eval_start_time, last_time-start_time)
def evaluate(self, eval_dataset):
eval_dataset, _ = eval_dataset
total_cost_mean, reconstruction_mean, kl_mean = tf.keras.metrics.Mean(), tf.keras.metrics.Mean(), tf.keras.metrics.Mean()
for entry in eval_dataset.__iter__():
if len(entry) == 2:
x_image = entry[0]
else:
x_image = next(entry)[2]
x_image = tf.image.rgb_to_grayscale(x_image/255.0) if self._grayscale else x_image / 255.0
if x_image.shape[0] != self._png_dim:
x_image = tf.image.resize(x_image, (self._png_dim, self._png_dim))
outputs = self.forward(x_image, training=True)
total_cost, reconstruction_cost, kl_cost = self.compute_loss(outputs, x_image)
total_cost_mean(total_cost)
reconstruction_mean(reconstruction_cost)
kl_mean(kl_cost)
return total_cost_mean.result(), reconstruction_mean.result(), kl_mean.result()
def test(self, test_dataset, result_name, steps=None):
self._sampling_dir = os.path.join(self._sampling_dir, result_name)
test_dataset, _ = test_dataset
# Begin Writing Child-Process
process, write_queue = process_write_out(parallel_writer_vae_latent, (self._sampling_dir,))
try:
for step, entry in enumerate(test_dataset):
if steps == step:
break
x_image, class_names = entry[-2:]
x_image = tf.image.rgb_to_grayscale(x_image / 255.0) if self._grayscale else x_image / 255.0
if x_image.shape[0] != self._png_dim:
x_image = tf.image.resize(x_image, (self._png_dim, self._png_dim))
reconstruction, [z, _, _] = self.forward(x_image, training=False)
np_images, np_recons, np_z, np_class_names = x_image.numpy(), reconstruction.numpy(), z.numpy(), class_names.numpy()
for idx in range(0, x_image.shape[0], 50):
write_queue.put({"rasterized_images": np_images[idx],
"reconstructed_images": np_recons[idx],
"latent_embedding": np_z[idx],
"class_names": np_class_names[idx]})
write_queue.put(None)
except:
process.terminate()
raise
process.join()
@tf.function
def train_step(self, x_image):
if x_image.shape[0] != self._png_dim:
x_image = tf.image.resize(x_image, (self._png_dim, self._png_dim))
with tf.GradientTape() as tape:
outputs = self.forward(x_image, training=True)
total_loss, reconstruction_loss, kl_loss = self.compute_loss(outputs, x_image)
grads = tape.gradient(total_loss,
self._conv_encoder.trainable_variables +
self._mu.trainable_variables + self._var.trainable_variables +
self._conv_decoder.trainable_variables)
self._optimizer.apply_gradients(zip(grads,
self._conv_encoder.trainable_variables +
self._mu.trainable_variables + self._var.trainable_variables +
self._conv_decoder.trainable_variables))
return total_loss, reconstruction_loss, kl_loss
@tf.function
def compute_loss(self, outputs, ground_truth):
reconstruction, params = outputs
z, mean, logvar = params
cross_ent = tf.nn.sigmoid_cross_entropy_with_logits(logits=reconstruction, labels=ground_truth)
reconstruction_loss_batched = tf.reduce_mean(cross_ent, axis=[1, 2, 3])
kl_loss_batched = -0.5 * tf.reduce_mean((1 + logvar - tf.square(mean) - tf.exp(logvar)))
kl_loss_batched = tf.maximum(kl_loss_batched, self._kl_tolerance) * self._kl_weight
total_loss_batched = reconstruction_loss_batched + kl_loss_batched
# total_loss_batched = reconstruction_loss_batched
reconstruction_loss = tf.reduce_mean(reconstruction_loss_batched)
kl_loss = tf.reduce_mean(kl_loss_batched)
total_loss = tf.reduce_mean(total_loss_batched)
return total_loss, reconstruction_loss, kl_loss
@tf.function
def embed(self, x_image, training=False):
if x_image.shape[0] != self._png_dim:
x_image = tf.image.resize(x_image, (self._png_dim, self._png_dim))
if tf.reduce_max(x_image) > 1.0:
x_image = x_image / 255.0
x = self._conv_encoder(x_image, training)
mu, var = self._mu(x, training=training), self._var(x, training=training)
# Re-parameterize
eps = tf.random.normal(shape=mu.shape)
z = mu + eps * var
return z, mu, var
@tf.function
def forward(self, x_image, training):
if x_image.shape[1] != self._png_dim:
x_image = tf.image.resize(x_image, (self._png_dim, self._png_dim))
if tf.reduce_max(x_image) > 1.0:
x_image = x_image / 255.0
x = self._conv_encoder(x_image, training)
mu, var = self._mu(x, training=training), self._var(x, training=training)
# Re-parameterize
eps = tf.random.normal(shape=mu.shape)
z = mu + eps * var
x = tf.reshape(z, (z.shape[0], 1, 1, z.shape[-1]))
x = self._conv_decoder(x, training)
if not training:
x = tf.keras.activations.sigmoid(x)
return x, (z, mu, var)
@tf.function
def decode(self, z, training):
    """Decode latent vectors into images.

    :param z: latent batch of shape (batch, latent_dim)
    :param training: when False, sigmoid-activate the decoder logits
    :return: decoded image batch (logits if training, probabilities otherwise)
    """
    spatial = tf.reshape(z, (z.shape[0], 1, 1, z.shape[-1]))
    decoded = self._conv_decoder(spatial, training)
    return decoded if training else tf.keras.activations.sigmoid(decoded)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,370
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/datasets/quickdraw.py
|
import os
from concurrent.futures.process import ProcessPoolExecutor
from itertools import repeat
from math import ceil
from time import time
import numpy as np
import psutil as psutil
from absl import logging
from datasets.base import register_dataset, DatasetBase
from util import get_normalizing_scale_factor, quickdraw_process, ST2_classes, ST1_classes
@register_dataset("quickdraw")
class Quickdraw(DatasetBase):
    """Quickdraw sketch dataset.

    Loads preprocessed shard caches for model ingestion, and provides
    `prepare` to convert the raw Google Quickdraw .npz archives into those
    caches in parallel.
    """

    def __init__(self, data_dir, params):
        super(Quickdraw, self).__init__(data_dir, params)
        # All raw archives and caches live under <data_dir>/quickdraw.
        self._dataset_path = os.path.join(self._data_dir, 'quickdraw')

    def load(self, repeat=True):
        """Create a dataset pipeline from every cached shard of the current split."""
        data_path = os.path.join(self._dataset_path, 'caches', self._split)
        files = [os.path.join(data_path, shard_name) for shard_name in os.listdir(data_path)]
        return self._create_dataset_from_filepaths(files, repeat)

    def _filter_collections(self, files):
        """
        Selects files from archive.
        :param files:
        :return: y_strokes(ground_truth), y_strokes(teacher), x_image, class_name
        """
        # After sorting, index 0/1/2 correspond to class_name/image/strokes;
        # the stroke array serves as both ground truth and teacher input.
        files = sorted(files)
        return files[2], files[2], files[1], files[0]

    def prepare(self, FLAGS, max_seq_len=64, shard_size=1000, png_dims=(28, 28), unit_var=True, classes=ST1_classes):
        """
        Parallelized processing function for the Quickdraw dataset to convert .npz files into files for model ingestion.
        :param FLAGS:
        :param max_seq_len: drop sketches longer than this; if falsy, use the longest sketch found
        :param shard_size: number of sketches per output shard
        :param png_dims: rasterized image dimensions
        :param unit_var: scale stroke offsets to unit variance across the dataset
        :param classes: iterable of Quickdraw class names to include
        :return:
        """
        save_dir = os.path.join(self._dataset_path, "caches", self._split)
        raw_dir = os.path.join(self._dataset_path, "raw")
        os.makedirs(save_dir, exist_ok=True)
        files = [os.path.join(raw_dir, file_path + ".npz") for file_path in classes]
        logging.info('Loading NPZ Files | Num Classes: %d', len(files))
        all_sketches = np.empty((0, 2))
        for file in files:
            try:
                npz = np.load(file, encoding='latin1', allow_pickle=True)
            except IOError:
                logging.error("Numpy unable to load dataset file: {}".format(file))
                continue
            # Class label is the file name without directory or ".npz"
            # (os.path used instead of '/'-splitting for portability).
            class_name = np.array([os.path.splitext(os.path.basename(file))[0]])
            sketches = np.reshape(npz['train'], (-1, 1))
            # Renamed from `classes`: the original shadowed the `classes`
            # parameter from the first loop iteration onward.
            class_labels = np.tile(np.reshape(class_name, (1, -1)), sketches.shape)
            data = np.concatenate((sketches, class_labels), axis=1)
            if max_seq_len:
                # Keep only sketches that fit within the sequence-length budget.
                bool_array = np.array([sketch.shape[0] <= max_seq_len for sketch in data[:, 0]])
                data = data[bool_array]
            all_sketches = np.concatenate((all_sketches, data))
            logging.info("Loaded npz: %s | Taking samples %d/%d | Total samples: %d",
                         file, data.shape[0], sketches.shape[0], all_sketches.shape[0])
        if not max_seq_len:
            max_seq_len = max([len(x) for x in all_sketches[:, 0]])
        # Scale all offsets to be of unit variance (makes image very small)
        if unit_var:
            normalizing_scale_factor = get_normalizing_scale_factor(all_sketches[:, 0:1])
        else:
            normalizing_scale_factor = 1.0
        # Randomize
        np.random.shuffle(all_sketches)
        logging.info("Beginning Processing | %s sketches | %s classes | Max Sequence Length: %s",
                     all_sketches.shape[0], len(files), max_seq_len)
        cpu_count = psutil.cpu_count(logical=False)
        workers_per_cpu = 2
        # BUG FIX: the original used ceil(total // shard_size) — ceil of an
        # already-floored integer division — so the final partial shard had no
        # output filename and executor.map silently dropped it (map stops at
        # the shortest iterable).
        num_shards = ceil(all_sketches.shape[0] / shard_size)
        with ProcessPoolExecutor(max_workers=cpu_count * workers_per_cpu) as executor:
            out = executor.map(quickdraw_process,
                               (all_sketches[i: i + shard_size] for i in range(0, all_sketches.shape[0], shard_size)),
                               repeat(max_seq_len),
                               repeat(png_dims),
                               (os.path.join(save_dir, "{}.npz".format(i)) for i in range(num_shards)),
                               repeat(normalizing_scale_factor),
                               chunksize=1)
            batch_count = 0
            last_time = time()
            for write_signal in out:
                batch_count += write_signal
                if batch_count % cpu_count == 0:
                    curr_time = time()
                    logging.info("Processed batch: {:5} | Total: {:8}/{:8} | Time/Batch: {:8.2f} | Time/Image: {:8.8f}"
                                 .format(batch_count,
                                         min(batch_count * shard_size, all_sketches.shape[0]),
                                         all_sketches.shape[0],
                                         (curr_time - last_time) / cpu_count,
                                         (curr_time - last_time) / (cpu_count * shard_size)))
                    last_time = curr_time
        # The original wrapped the loop in try/except StopIteration, but a for
        # loop consumes StopIteration internally, so the handler never fired
        # and "Processing Done" was never logged. Log it after the loop.
        logging.info("Processing Done")
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,371
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/configs/base/configs.py
|
from absl import logging
# Global registry mapping config names to their factory callables.
CONFIGS = {}


def register_config(config_name):
    """
    Decorator factory: records the decorated callable in CONFIGS under
    ``config_name`` and hands it back unchanged.
    :param config_name: registry key for the config factory
    :return: the decorator
    """
    def decorator(fn):
        CONFIGS[config_name] = fn
        return fn
    return decorator
def get_config_old(config):
    """
    Returns config class if registered.

    :param config: registry key to look up in CONFIGS
    :return: the registered config factory
    :raises ValueError: if the key is not registered
    """
    if config in CONFIGS:
        return CONFIGS[config]
    # BUG FIX: the original passed ("Config not found: %s", config) straight to
    # ValueError, which stores them as a tuple instead of interpolating the
    # name into the message. Format explicitly.
    raise ValueError("Config not found: %s" % config)
def get_config(config: str):
    """
    Resolve a compound config string of the form "base/mod1/mod2/...".

    The first '/'-separated term names a base config; each following term is a
    modifier looked up as "base/mod" and applied to the previous result. To
    match the convention of returning a callable, the final config is wrapped
    in a zero-argument lambda.
    :param config:
    :return: fn: () -> config
    """
    parts = config.split('/')
    base, mods = parts[0], parts[1:]
    key = base
    try:
        config = CONFIGS[key]()
        for mod in mods:
            key = "{}/{}".format(base, mod)
            config = CONFIGS[key](config)
    except KeyError:
        # absl logging.fatal aborts the process on an unknown key.
        logging.fatal("Incorrect config requested %s from %s", key, config)
    return lambda: config
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,372
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/configs/miniimagenet_configs.py
|
from configs import register_config
from util import HParams, miniimagenet_train, miniimagenet_eval
@register_config("miniimagenet")
def miniimagenet_default():
    """Base miniImageNet dataset hyperparameters.

    The modifier configs registered below ("miniimagenet/...") start from this
    set and override individual fields via set_hparam.
    """
    return HParams(
        # ----- Dataset Parameters ----- #
        split="",
        mode="batch",  # episodic or batch
        # ----- Batch Parameters ----- #
        batch_size=256,
        # ----- Episodic Parameters ----- #
        episodic=False,
        way=0,
        shot=0,
        # ----- Loading Parameters ----- #
        cycle_length=None,
        num_parallel_calls=None,
        block_length=1,
        buff_size=2,
        shuffle=False,
    )
@register_config("miniimagenet/5way1shot")
def miniimagenet_5way1shot(hparams: HParams):
    """Modifier: episodic 5-way 1-shot sampling."""
    for key, value in (("mode", "episodic"), ("way", 5), ("shot", 1)):
        hparams.set_hparam(key, value)
    return hparams
@register_config("miniimagenet/5way5shot")
def miniimagenet_5way5shot(hparams: HParams):
    """Modifier: episodic 5-way 5-shot sampling."""
    for key, value in (("mode", "episodic"), ("way", 5), ("shot", 5)):
        hparams.set_hparam(key, value)
    return hparams
@register_config("miniimagenet/5way20shot")
def miniimagenet_5way20shot(hparams: HParams):
    """Modifier: episodic 5-way 20-shot sampling."""
    for key, value in (("mode", "episodic"), ("way", 5), ("shot", 20)):
        hparams.set_hparam(key, value)
    return hparams
@register_config("miniimagenet/5way50shot")
def miniimagenet_5way50shot(hparams: HParams):
    """Modifier: episodic 5-way 50-shot sampling.

    BUG FIX: the original function was named ``miniimagenet_5way5shot``,
    colliding with the 5-shot variant above. The registry was unaffected (the
    decorator captures the function object before the module attribute is
    overwritten), but the duplicate name shadowed the 5-shot function at
    module level; renamed to match the registered key.
    """
    hparams.set_hparam("mode", "episodic")
    hparams.set_hparam("way", 5)
    hparams.set_hparam("shot", 50)
    return hparams
@register_config("miniimagenet/sachinravi_train")
def miniimagenet_sachinravi_train(hparams: HParams):
    """Modifier: batch mode over the Ravi & Larochelle training-class files."""
    # Join class names into a comma-separated list of .npz file names.
    split_value = ".npz,".join(miniimagenet_train) + ".npz"
    hparams.set_hparam("mode", "batch")
    hparams.set_hparam("split", split_value)
    hparams.set_hparam("shuffle", True)
    return hparams
@register_config("miniimagenet/sachinravi_val")
def miniimagenet_sachinravi_val(hparams: HParams):
    """Modifier: batch mode over the Ravi & Larochelle validation-class files."""
    # Join class names into a comma-separated list of .npz file names.
    split_value = ".npz,".join(miniimagenet_eval) + ".npz"
    hparams.set_hparam("mode", "batch")
    hparams.set_hparam("split", split_value)
    hparams.set_hparam("shuffle", True)
    return hparams
@register_config("miniimagenet/sachinravi_test")
def miniimagenet_sachinravi_test(hparams: HParams):
    """Modifier: batch mode over the Ravi & Larochelle test-class files (hard-coded WordNet IDs)."""
    test_split = ('n01981276.npz,n02116738.npz,n03146219.npz,n04149813.npz,n04146614.npz,n04522168.npz,n02099601.npz,n02443484.npz,n02129165.npz,n03272010.npz,'
                  'n04418357.npz,n03127925.npz,n02110063.npz,n02871525.npz,n03775546.npz,n02219486.npz,n02110341.npz,n07613480.npz,n03544143.npz,n01930112.npz')
    hparams.set_hparam("mode", "batch")
    hparams.set_hparam("split", test_split)
    hparams.set_hparam("shuffle", True)
    return hparams
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,373
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/subs/decoders.py
|
import tensorflow as tf
from absl import logging
from models.subs.cells import LayerNormLSTMCell, HyperLSTMCell
from util import get_mixture_coef
class DecoderDefault(tf.keras.Model):
    def __init__(self, rnn_output_size, num_mixtures, rnn_model, cell_configs):
        """
        Autoregressive RNN Sequence decoder that outputs a sequence of points and pen states that constitute a sketch.
        Trained with teacher forcing during train time, the input at each timestep the ground truth data.
        During test time, we sample from the output distribution to obtain the input for the next timestep.
        :param rnn_output_size: number of units in the decoder RNN cell
        :param num_mixtures: number of bivariate-Gaussian mixture components in the output distribution
        :param rnn_model: cell type selector; only "hyper" is supported here
        :param cell_configs: dict of HyperLSTMCell construction options
        """
        super(DecoderDefault, self).__init__()
        self._rnn_model = rnn_model
        self._rnn_output_size = rnn_output_size
        self._num_mixtures = num_mixtures
        self._cell_configs = cell_configs
        if self._rnn_model == "hyper":
            self.cell = HyperLSTMCell(self._rnn_output_size,
                                      hyper_num_units=self._cell_configs["hyper_num_units"],
                                      hyper_embedding_size=self._cell_configs["hyper_embedding_size"],
                                      use_recurrent_dropout=self._cell_configs["use_recurrent_dropout"],
                                      recurrent_dropout_prob=self._cell_configs["recurrent_dropout_prob"])
        else:
            # absl logging.fatal aborts the process on an unsupported cell type.
            logging.fatal("Invalid RNN Cell Selection: %s", self._rnn_model)

    def build(self, input_shape):
        self._RNN = tf.keras.layers.RNN(self.cell)
        # 6 parameters per mixture (pi, mu1, mu2, sigma1, sigma2, rho) + 3 pen state outputs
        glorot_init = tf.initializers.GlorotNormal()
        self._rnn_output_linear = tf.keras.layers.Dense(3 + self._num_mixtures * 6,
                                                        kernel_initializer=glorot_init, bias_initializer=glorot_init)

    def call(self, inputs, training=None, mask=None):
        """
        Run the decoder over a sketch sequence.

        :param inputs: tuple (ground_truth_sketch, z, init_cell_state, sequence_length)
        :param training: if True, teacher-force with the ground truth and return
            only mixture parameters; otherwise sample strokes autoregressively
        :param mask: unused (keras signature compatibility)
        :return: mixture params if training, else (params, sampled stroke sequence)
        """
        ground_truth_sketch, z, init_cell_state, sequence_length = inputs
        batch_size = ground_truth_sketch.shape[0]
        # Concatenate the latent z onto every timestep's stroke input.
        time_dim_z = tf.reshape(z, [z.shape[0], 1, z.shape[1]])
        tiled_z = tf.tile(time_dim_z, [1, ground_truth_sketch.shape[1], 1])
        decoder_input = tf.concat([ground_truth_sketch, tiled_z], 2)
        # Collects one sampled stroke per step at inference time.
        loop_state_array = tf.TensorArray(dtype=tf.float32, size=sequence_length)

        def loop_fn(time, cell_output, cell_state, loop_state):
            # raw_rnn callback: produces the next input/state at each timestep.
            emit_output = cell_output
            finished = (time >= sequence_length)
            if cell_output is None:
                # First invocation: seed with the initial state and first stroke.
                next_cell_state = init_cell_state
                next_input = decoder_input[:, 0, :]
                next_loop_state = loop_state if training else loop_state_array
            else:
                next_cell_state = cell_state
                if training:
                    # Teacher forcing: feed the ground-truth stroke for this step.
                    next_input = tf.cond(finished,
                                         lambda: tf.zeros(decoder_input[:, 0, :].shape),
                                         lambda: decoder_input[:, time, :])
                    next_loop_state = loop_state
                else:
                    # Inference: map the cell output to mixture parameters.
                    step_pi, step_mu1, step_mu2, step_sigma1, step_sigma2, step_rho, step_pen, _ = get_mixture_coef(
                        self._rnn_output_linear(cell_output))
                    # Highest weighted mixture component
                    max_preidx = tf.math.argmax(step_pi, axis=1, output_type=tf.int32)
                    max_idx = tf.stack([tf.range(batch_size), max_preidx], axis=-1)
                    pi, mu1, mu2, sigma1, sigma2, rho = [tf.gather_nd(param, max_idx)
                                                         for param in [step_pi, step_mu1, step_mu2, step_sigma1, step_sigma2, step_rho]]
                    # Sample from bivariate gaussian (lower-triangular factor of the covariance)
                    loc = tf.stack((mu1, mu2), axis=1)
                    cov = tf.stack((tf.stack((sigma1, tf.zeros(sigma1.shape)), axis=-1),
                                    tf.stack((rho * sigma2, sigma2 * tf.sqrt(1 - rho ** 2 + 1e-6)), axis=-1)),
                                   axis=-2)
                    eps = tf.random.normal(loc.shape)
                    xy = loc + tf.einsum("ijk,ik->ij", cov, eps)
                    # Convert softmax to one-hot
                    pen_one_hot = tf.one_hot(tf.argmax(step_pen, axis=1), depth=3)
                    stroke = tf.cast(tf.concat((xy, pen_one_hot), axis=1), dtype=tf.float32)
                    # The sampled stroke (plus z) becomes the next timestep's input.
                    next_input = tf.concat([stroke, z], 1)
                    next_loop_state = loop_state.write(time - 1, stroke)
            return finished, next_input, next_cell_state, emit_output, next_loop_state
        emit_outputs_arr, final_state, loop_state_output_arr = tf.compat.v1.nn.raw_rnn(self.cell, loop_fn)
        # (time, batch, units) -> (batch, time, units)
        param_output = tf.transpose(emit_outputs_arr.stack(), (1, 0, 2))
        raw_params = self._rnn_output_linear(tf.reshape(param_output, (batch_size * sequence_length, param_output.shape[-1])))
        params = [tf.cast(param, dtype=tf.float32) for param in get_mixture_coef(raw_params)]
        # In training mode, only the parameters for the NLL loss are returned.
        # Otherwise, the strokes sampled at each timestep are returned as well.
        if training:
            return params
        else:
            stroke_output = tf.transpose(loop_state_output_arr.stack(), (1, 0, 2))
            start_strokes = ground_truth_sketch[:, 0:1, :]
            return params, tf.concat((start_strokes, stroke_output), axis=1)

    def call_with_hyper_states(self, inputs, training=None, mask=None):
        """
        Alternative implementation of the "call" function.
        Saves and returns the hyperembedding activations per time step, used for the hyper_embedding_experiment.
        :param inputs: tuple (ground_truth_sketch, z, init_cell_state, sequence_length)
        :param training: see call(); hyper states are only captured when not training
        :param mask: unused (keras signature compatibility)
        :return: params if training, else (params, strokes, per-step cell states)
        """
        ground_truth_sketch, z, init_cell_state, sequence_length = inputs
        batch_size = ground_truth_sketch.shape[0]
        time_dim_z = tf.reshape(z, [z.shape[0], 1, z.shape[1]])
        tiled_z = tf.tile(time_dim_z, [1, ground_truth_sketch.shape[1], 1])
        decoder_input = tf.concat([ground_truth_sketch, tiled_z], 2)
        loop_state_array = tf.TensorArray(dtype=tf.float32, size=sequence_length)

        def loop_fn(time, cell_output, cell_state, loop_state):
            # Same loop as call(), except the cell state is saved alongside
            # each sampled stroke so hyper activations can be recovered later.
            emit_output = cell_output
            finished = (time >= sequence_length)
            if cell_output is None:
                next_cell_state = init_cell_state
                next_input = decoder_input[:, 0, :]
                next_loop_state = loop_state if training else loop_state_array
            else:
                next_cell_state = cell_state
                if training:
                    next_input = tf.cond(finished,
                                         lambda: tf.zeros(decoder_input[:, 0, :].shape),
                                         lambda: decoder_input[:, time, :])
                    next_loop_state = loop_state
                else:
                    step_pi, step_mu1, step_mu2, step_sigma1, step_sigma2, step_rho, step_pen, _ = get_mixture_coef(
                        self._rnn_output_linear(cell_output))
                    max_preidx = tf.math.argmax(step_pi, axis=1, output_type=tf.int32)
                    max_idx = tf.stack([tf.range(batch_size), max_preidx], axis=-1)
                    pi, mu1, mu2, sigma1, sigma2, rho = [tf.gather_nd(param, max_idx)
                                                         for param in [step_pi, step_mu1, step_mu2, step_sigma1, step_sigma2, step_rho]]
                    loc = tf.stack((mu1, mu2), axis=1)
                    cov = tf.stack((tf.stack((sigma1, tf.zeros(sigma1.shape)), axis=-1),
                                    tf.stack((rho * sigma2, sigma2 * tf.sqrt(1 - rho ** 2 + 1e-6)), axis=-1)),
                                   axis=-2)
                    eps = tf.random.normal(loc.shape)
                    xy = loc + tf.einsum("ijk,ik->ij", cov, eps)
                    pen_one_hot = tf.one_hot(tf.argmax(step_pen, axis=1), depth=3)
                    stroke = tf.cast(tf.concat((xy, pen_one_hot), axis=1), dtype=tf.float32)
                    next_input = tf.concat([stroke, z], 1)
                    # Store stroke (5 dims) and cell state side by side; they
                    # are split apart again after the RNN finishes.
                    next_loop_state = loop_state.write(time - 1, tf.concat((stroke, cell_state), axis=1))
            return finished, next_input, next_cell_state, emit_output, next_loop_state
        emit_outputs_arr, final_state, loop_state_output_arr = tf.compat.v1.nn.raw_rnn(self.cell, loop_fn)
        param_output = tf.transpose(emit_outputs_arr.stack(), (1, 0, 2))
        raw_params = self._rnn_output_linear(tf.reshape(param_output, (batch_size * sequence_length, param_output.shape[-1])))
        params = [tf.cast(param, dtype=tf.float32) for param in get_mixture_coef(raw_params)]
        if training:
            return params
        else:
            state_outputs = tf.transpose(loop_state_output_arr.stack(), (1, 0, 2))
            # First 5 channels are the stroke (dx, dy, 3 pen states); the rest
            # is the saved cell state.
            stroke_output = state_outputs[:, :, :5]
            cell_states = state_outputs[:, :, 5:]
            start_strokes = ground_truth_sketch[:, 0:1, :]
            return params, tf.concat((start_strokes, stroke_output), axis=1), cell_states
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,374
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/run_experiment.py
|
import os
import traceback
import numpy as np
import tensorflow as tf
from absl import app, flags, logging
import models
import configs
import datasets
from util import HParams
from util import log_flags, log_hparams
try:
import horovod.tensorflow as hvd
except:
hvd = None
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
FLAGS = flags.FLAGS
flags.DEFINE_string("dir", "/h/wangale/project/few-shot-sketch", "Project directory")
flags.DEFINE_string("data_dir", "/h/wangale/data", "Data directory")
flags.DEFINE_boolean("check_numerics", False, "Enable tensorflow check numerics.")
flags.DEFINE_string("id", None, "training_id")
flags.DEFINE_string("logfile", None, "Logfile name")
flags.DEFINE_string("model", None, "Model to train")
flags.DEFINE_string("model_cfgset", None, "Configuration set for the model")
flags.DEFINE_string("model_cfgs", "", "Custom configuration for the model configs")
flags.DEFINE_string("train_dataset", None, "Dataset used")
flags.DEFINE_string("train_dataset_cfgset", None, "Configuration set for the dataset")
flags.DEFINE_string("train_dataset_cfgs", "", "Custom configuration for the dataset configs")
flags.DEFINE_string("eval_dataset", None, "Dataset used")
flags.DEFINE_string("eval_dataset_cfgset", None, "Configuration set for the dataset")
flags.DEFINE_string("eval_dataset_cfgs", "", "Custom configuration for the dataset configs")
flags.DEFINE_integer("train_steps", None, "Training iterations")
flags.DEFINE_integer("print_freq", 250, "Training loop print frequency")
flags.DEFINE_integer("save_freq", 10000, "Save checkpoint frequency")
flags.DEFINE_integer("eval_freq", None, "Evaluation frequency; set to save_freq if None")
flags.DEFINE_boolean("distributed", False, "Distributed training if model architecture allows")
flags.DEFINE_integer("random_seed", 0, "Random seed")
flags.mark_flags_as_required(["id", "logfile",
"model", "model_cfgset",
"train_dataset", "train_dataset_cfgset",
"train_steps"])
def experiment():
model_config: HParams = configs.get_config(FLAGS.model_cfgset)().parse(FLAGS.model_cfgs)
model = models.get_model(FLAGS.model)(FLAGS.dir, FLAGS.id, model_config)
train_dataset_config: HParams = configs.get_config(FLAGS.train_dataset_cfgset)().parse(FLAGS.train_dataset_cfgs)
train_dataset = datasets.get_dataset(FLAGS.train_dataset)(FLAGS.data_dir, train_dataset_config)
train_tf_dataset = train_dataset.load(repeat=True)
if FLAGS.eval_dataset:
eval_dataset_config: HParams = configs.get_config(FLAGS.eval_dataset_cfgset)().parse(FLAGS.eval_dataset_cfgs)
eval_dataset = datasets.get_dataset(FLAGS.eval_dataset)(FLAGS.data_dir, eval_dataset_config)
eval_tf_dataset = eval_dataset.load(repeat=False)
else:
eval_dataset_config = None
eval_tf_dataset = None
if (not FLAGS.distributed) or (hvd.rank() == 0):
logging.info("Creating Model: %s | Loading Train Dataset: %s | Loading Eval Dataset: %s",
FLAGS.model, FLAGS.train_dataset, FLAGS.eval_dataset)
log_hparams(model_config, train_dataset_config, eval_dataset_config)
logging.info("Beginning training loop")
# Debugging NaN errors.
if FLAGS.check_numerics:
tf.debugging.enable_check_numerics()
while True:
try:
model.train(train_tf_dataset,
FLAGS.train_steps,
FLAGS.print_freq,
FLAGS.save_freq,
eval_tf_dataset,
FLAGS.eval_freq)
except tf.errors.AbortedError:
logging.info("InvalidArgumentError received from training function. Restarting training.")
continue
else:
break
def main(argv):
"""Create directories and configure python settings"""
# Setup Directory
experiment_dir = os.path.join(FLAGS.dir, FLAGS.id)
if not os.path.exists(experiment_dir):
os.makedirs(os.path.join(experiment_dir, "logs"), exist_ok=True)
# Setup Logging
FLAGS.alsologtostderr = True
logging.get_absl_handler().use_absl_log_file(FLAGS.logfile, os.path.join(experiment_dir, "logs"))
# Setup Distributed
if FLAGS.distributed:
try:
hvd.init()
gpus = tf.config.list_physical_devices('GPU')
logging.info("Distributed training enabled.")
logging.info("GPUS: %s", str(gpus))
for gpu in gpus:
tf.config.experimental.set_memory_growth(gpu, True)
if gpus:
tf.config.experimental.set_visible_devices(gpus[hvd.local_rank()], 'GPU')
FLAGS.model_cfgs = (FLAGS.model_cfgs + ",distributed=True").strip(',')
except:
logging.info("Distributed training training setup failed. Disabling distributed training.")
if FLAGS.random_seed:
logging.info("Setting seed to %s", FLAGS.random_seed + hvd.rank())
np.random.seed(FLAGS.random_seed + hvd.rank())
tf.random.set_seed(FLAGS.random_seed + hvd.rank())
else:
# Setup seeds
if FLAGS.random_seed:
logging.info("Setting seed to %s", FLAGS.random_seed)
np.random.seed(FLAGS.random_seed)
tf.random.set_seed(FLAGS.random_seed)
# Log Flags
if (not FLAGS.distributed) or (hvd.rank() == 0):
log_flags(FLAGS)
try:
experiment()
except:
exception = traceback.format_exc()
logging.info(exception)
if __name__ == "__main__":
app.run(main)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,375
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/configs/__init__.py
|
from .drawer_configs import *
from .vae_configs import *
from .classifier_configs import *
from .quickdraw_configs import *
from .fs_omniglot_configs import *
from .sketchy_configs import *
from .miniimagenet_configs import *
from .base import *
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,376
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/datasets/__init__.py
|
from .quickdraw import *
from .fs_omniglot import *
from .fs_omniglot_vinyals import *
from .sketchy import *
from .miniimagenet import *
from .base import *
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,377
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/util/drawer_utils.py
|
import tensorflow as tf
import numpy as np
from .utils import gaussian_blur
def compute_pixel_loss(pi, mu1, mu2, sigma1, sigma2, rho, pen, stroke_gt, batch_size, pixel_dims):
"""
Computes the pixel loss given mixture of gaussian outputs at every timestep for the entire decoding process.
:param pi:
:param mu1:
:param mu2:
:param sigma1:
:param sigma2:
:param rho:
:param pen:
:param stroke_gt:
:param batch_size:
:param pixel_dims:
:return:
"""
padding = tf.round(tf.reduce_min(pixel_dims) / 10) * 2
stroke_gt_scaled = scale_and_center_strokes(stroke_gt, stroke_gt[:, :, 0:2], pixel_dims, padding)
pixel_gt = strokes_to_image(stroke_gt_scaled, pixel_dims)
# Scale predicted strokes based on dimensions of ground truth strokes. Then produce pixel image for comparison
predicted_strokes = params_to_strokes(pi, mu1, mu2, sigma1, sigma2, rho, pen, batch_size)
scaled_predicted_strokes = scale_and_center_strokes(predicted_strokes, stroke_gt[:, :, 0:2], pixel_dims, padding)
predicted_pixels = strokes_to_image(scaled_predicted_strokes, pixel_dims)
pix_blur, pixel_gt_blur = tf.split(gaussian_blur(tf.concat((predicted_pixels, pixel_gt), axis=0), (4, 4), 2.), 2, axis=0)
loss = tf.reduce_mean(tf.keras.losses.binary_crossentropy(pixel_gt_blur, pix_blur), axis=(1, 2))
return loss
def scale_and_center_strokes(input_strokes, stroke_gt, pixel_dims, padding):
"""
Vectorized / differentiable scaling and centering function.
Computes scaling and shift parameter based on stroke_gt to maximize size given pixel dimensions and padding.
Applies these parameters to input_strokes to scale them.
:param input_strokes:
:param stroke_gt:
:param pixel_dims:
:param padding:
:return:
"""
pixel_dims = tf.cast(pixel_dims, dtype=tf.float32)
padding = tf.cast(padding, dtype=tf.float32)
stroke_gt_abs = tf.cumsum(stroke_gt, axis=1)
min_x, max_x = tf.reduce_min(stroke_gt_abs[:, :, 0], axis=1), tf.reduce_max(stroke_gt_abs[:, :, 0], axis=1)
min_y, max_y = tf.reduce_min(stroke_gt_abs[:, :, 1], axis=1), tf.reduce_max(stroke_gt_abs[:, :, 1], axis=1)
curr_im_pts = [[min_x, max_x], [min_y, max_y]]
scale = tf.reshape(pixel_dims - padding, (1, -1)) / tf.reshape(tf.maximum(max_x - min_x, max_y - min_y), shape=(-1, 1))
scale = tf.reshape(scale, (-1, 1, 2))
shift = (tf.reshape(pixel_dims, (1, 1, -1)) - tf.reshape(tf.reduce_sum(curr_im_pts, axis=1), (-1, 1, 2)) * scale) / 2.0
# predicted_strokes[:, :, 0:2] *= scale # Scale all strokes (initial point is unchanged at (0, 0))
scaled_strokes = input_strokes[:, :, 0:2] * scale
scaled_and_centered_strokes = tf.concat((scaled_strokes[:, 0:1, 0:2] + shift, scaled_strokes[:, 1:, 0:2]), axis=1)
scaled_and_centered_strokes = tf.concat((scaled_and_centered_strokes, input_strokes[:, :, 2:]), axis=-1)
tf.concat((tf.tile([[[0., 0., 0., 1., 0.]]], (scaled_and_centered_strokes.shape[0], 1, 1)), scaled_and_centered_strokes), axis=1)
return scaled_and_centered_strokes
def params_to_strokes(pi, mu1, mu2, sigma1, sigma2, rho, pen, batch_size):
"""
Samples from the mixture of gaussian parameters to obtain stroke-5 representation.
Pen states are left as softmax and not made one-hot like in the label data.
:param pi:
:param mu1:
:param mu2:
:param sigma1:
:param sigma2:
:param rho:
:param pen:
:param batch_size:
:return:
"""
pen = tf.reshape(pen, shape=(batch_size, -1, 3))
# This takes the highest weighted Gaussian each step to be plotted.
max_idx = tf.stack((tf.cast(tf.range(0, pi.shape[0]), tf.int64),
tf.argmax(pi, axis=1)), axis=-1)
step_mu1, step_mu2, step_sigma1, step_sigma2, step_rho = [tf.reshape(tf.gather_nd(param, max_idx), shape=(batch_size, -1))
for param in [mu1, mu2, sigma1, sigma2, rho]]
# Compute all my point offsets using parameters per step
step_mu_2d = tf.stack((step_mu1, step_mu2), axis=-1)
step_lower_triangular_decomp = tf.stack((tf.stack((step_sigma1, tf.zeros(step_sigma1.shape)), axis=-1),
tf.stack((step_rho * step_sigma2, step_sigma2 * tf.sqrt(1 - step_rho ** 2 + 1e-6)), axis=-1)),
axis=-2)
mu = tf.reshape(step_mu_2d, (-1, 2))
eps = tf.random.normal(mu.shape)
lower_triangular_decomp = tf.reshape(step_lower_triangular_decomp, (-1, 2, 2))
relative_xy = tf.reshape(mu + tf.einsum("ijk,ik->ij", lower_triangular_decomp, eps), (batch_size, -1, 2))
# Re-add intial point
relative_xy = tf.concat((tf.zeros((relative_xy.shape[0], 1, relative_xy.shape[-1])), relative_xy), axis=1)
pen = tf.concat((tf.tile(tf.constant([[[1., 0., 0.]]]), (batch_size, 1, 1)), pen), axis=1)
return tf.concat((relative_xy, pen), axis=-1)
def strokes_to_image(strokes, image_dim):
"""
Given strokes, produce a greyscale image of dimensions image_dim.
Pixel intensity is computed based off euclidean distance from rendered line segments described by strokes.
:param strokes:
:param image_dim:
:return:
"""
batch_size = strokes.shape[0]
relative_xy, pen = strokes[:, :, 0:2], strokes[:, :, 2:]
abs_xy = tf.cumsum(relative_xy, axis=-2)
p_1, p_2 = (tf.reshape(x, (batch_size, 1, -1, 2)) for x in (abs_xy[:, :-1, :], abs_xy[:, 1:, :]))
p_3 = tf.reshape(tf.stack(tf.meshgrid(tf.range(0, tf.cast(image_dim[0], dtype=tf.float32), dtype=tf.float32),
tf.range(0, tf.cast(image_dim[1], dtype=tf.float32), dtype=tf.float32)), axis=-1), (1, -1, 1, 2))
ab, ac, bc = p_2 - p_1, p_3 - p_1, p_3 - p_2
# Computes AB . AC
ab_dot_ac = tf.einsum("ikl,ijkl->ijk", ab[:, 0], ac)
ab_cross_ac = (ab[:, :, :, 0] * ac[:, :, :, 1]) - (ab[:, :, :, 1] * ac[:, :, :, 0])
ab_norm_sq = tf.reduce_sum(ab ** 2, axis=-1)
pix_dist = tf.where(ab_dot_ac < 0,
tf.reduce_sum(ac ** 2, axis=-1),
tf.where(ab_dot_ac > ab_norm_sq,
tf.reduce_sum(bc ** 2, axis=-1),
ab_cross_ac ** 2 / (ab_norm_sq + 1e-4)))
pen_mask = tf.reshape(pen[:, :-1, 0], (batch_size, 1, -1))
pix_dist += tf.where(pen_mask > 0.5,
tf.zeros(pix_dist.shape),
tf.ones(pix_dist.shape) * 1e6)
min_dist = tf.reduce_min(pix_dist, axis=-1)
pix = tf.sigmoid(2 - 5. * min_dist)
return tf.reshape(pix, (batch_size, image_dim[0], image_dim[1], 1))
def compute_pen_state_loss(z_pen_logits, pen_data):
"""Returns a loss fn based on eq #26 of http://arxiv.org/abs/1308.0850."""
# This represents the L_R only (i.e. does not include the KL loss term).
result = tf.nn.softmax_cross_entropy_with_logits(
labels=pen_data, logits=z_pen_logits)
result = tf.reshape(result, [-1, 1])
return result
def compute_mdn_loss(z_pi, z_mu1, z_mu2, z_sigma1, z_sigma2, z_corr, x1_gt, x2_gt, pen_gt):
"""Returns a loss fn based on eq #26 of http://arxiv.org/abs/1308.0850."""
norm1, norm2, s1s2 = tf.subtract(x1_gt, z_mu1), tf.subtract(x2_gt, z_mu2), tf.multiply(z_sigma1, z_sigma2)
epsilon = 1e-6
# Eq 25
z = (tf.square(tf.divide(norm1, z_sigma1)) + tf.square(tf.divide(norm2, z_sigma2)) -
2 * tf.divide(tf.multiply(z_corr, tf.multiply(norm1, norm2)), s1s2 + epsilon))
# Eq 24
neg_rho = 1 - tf.square(z_corr)
exp = tf.exp(tf.divide(-z, 2 * neg_rho + epsilon))
denom = 2 * np.pi * tf.multiply(s1s2, tf.sqrt(neg_rho + epsilon))
gmm_pdf = tf.divide(exp, denom + epsilon)
# Weight GMM PDF
weighted_gmm_pdf = z_pi * gmm_pdf
unnorm_log_likelihood = tf.reduce_sum(weighted_gmm_pdf, 1, keepdims=True)
result = -tf.math.log(unnorm_log_likelihood + epsilon)
# Zero out loss terms beyond N_s, the last actual stroke
fs = 1.0 - pen_gt[:, 2] # use training data for this
fs = tf.reshape(fs, [-1, 1])
result = tf.multiply(result, fs)
return result
def get_mixture_coef(output):
""" Returns the tf slices containing mdn dist params. """
# This uses eqns 18 -> 23 of http://arxiv.org/abs/1308.0850.
z = output
# z = output
z_pen_logits = z[:, 0:3] # pen states
z_pi, z_mu1, z_mu2, z_sigma1, z_sigma2, z_corr = tf.split(z[:, 3:], 6, 1)
# softmax all the pi's and pen states:
z_pi = tf.nn.softmax(z_pi)
z_pen = tf.nn.softmax(z_pen_logits)
# exponentiate the sigmas and also make corr between -1 and 1.
z_sigma1 = tf.exp(z_sigma1)
z_sigma2 = tf.exp(z_sigma2)
z_corr = tf.tanh(z_corr) # \rho
r = [z_pi, z_mu1, z_mu2, z_sigma1, z_sigma2, z_corr, z_pen, z_pen_logits]
return r
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,378
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/drawer_enc_block.py
|
from models import DrawerModel, register_model, DecoderDefault
from models.subs.encoders import EncoderBlock4
import tensorflow as tf
try:
import horovod.tensorflow as hvd
except:
hvd = None
@register_model('drawer_enc_block')
class DrawerEncBlockModel(DrawerModel):
    """Drawer variant that swaps in the Conv4 encoder backbone."""

    def __init__(self, base_dir, model_id, params, training=True, ckpt=None):
        """
        Inherit from the base Drawer but use the Conv4 encoder backbone from
        https://arxiv.org/abs/1703.03400
        :param base_dir: root directory for model artifacts
        :param model_id: identifier for this model instance
        :param params: hyper-parameter container
        :param training: whether the model is being trained
        :param ckpt: optional checkpoint to restore from
        """
        # All setup is delegated to DrawerModel; only _build_model differs.
        super(DrawerEncBlockModel, self).__init__(base_dir, model_id, params, training, ckpt=ckpt)

    def _build_model(self):
        """Assemble the decoder, the Conv4 encoder, and the Adam optimizer."""
        self._decoder = DecoderDefault(self._dec_rnn_size, self._num_mixture, self._rnn_model, self._cell_configs)
        self._encoder = EncoderBlock4(self._z_size, self._decoder.cell.state_size)
        if self._distributed:
            # Scale the base learning rate by the number of Horovod workers.
            base_lr = self._lr * hvd.size()
        else:
            base_lr = self._lr
        schedule = tf.keras.optimizers.schedules.ExponentialDecay(base_lr, self._lr_decay_freq, self._lr_decay_rate)
        self._optimizer = tf.optimizers.Adam(learning_rate=schedule, clipvalue=self._gradient_cap)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,379
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/util/quickdraw_utils.py
|
import os
from io import BytesIO
import numpy as np
import svgwrite
from PIL import Image
try:
import cairosvg
except:
cairosvg = None
def stroke_three_format(big_stroke):
    """
    Convert from stroke-5 format (from sketch-rnn paper) back to stroke-3.
    This is only for SCALE INVARIANT and UNCENTERED stroke-5 format.
    A sequence-end marker at index 0 (or no marker at all) means the whole
    sequence is used.
    """
    length = len(big_stroke)
    end_markers = np.flatnonzero(big_stroke[:, 4] > 0)
    if end_markers.size and end_markers[0] > 0:
        length = int(end_markers[0])
    converted = np.zeros((length, 3))
    converted[:, 0:2] = big_stroke[:length, 0:2]
    converted[:, 2] = big_stroke[:length, 3]
    return converted
def stroke_five_format(sketch, max_len):
    """
    Pad the batch to the bigger stroke-5 format as described in the paper.
    This is only for SCALE INVARIANT and UNCENTERED stroke-3 format.
    NOTE(review): unlike the centered variant, the first delta row is
    OVERWRITTEN by the start-of-sequence token rather than shifted right —
    confirm this is intentional.
    """
    n = len(sketch)
    out = np.zeros((max_len + 1, 5), dtype=float)
    out[:n, 0:2] = sketch[:, 0:2]
    out[:n, 3] = sketch[:, 2]
    out[:n, 2] = 1.0 - out[:n, 3]
    out[n:, 4] = 1
    # Start token replaces row 0 (sketch-rnn methodology).
    out[0] = [0, 0, 1, 0, 0]
    return out
def stroke_three_format_centered(big_stroke):
    """
    Convert from stroke-5 format (from sketch-rnn paper) back to stroke-3.
    Note: This is only for SCALED AND CENTERED stroke-5 format.
    Row 0 is the start token and is dropped from the output.
    """
    length = len(big_stroke)
    end_markers = np.flatnonzero(big_stroke[:, 4] > 0)
    if end_markers.size and end_markers[0] > 0:
        length = int(end_markers[0])
    converted = np.zeros((length - 1, 3))
    converted[:, 0:2] = big_stroke[1:length, 0:2]
    converted[:, 2] = big_stroke[1:length, 3]
    return converted
def stroke_five_format_centered(sketch, max_len):
    """
    Pad the batch to the bigger stroke-5 format as described in the paper.
    This is only for SCALED AND CENTERED stroke-3 format.
    The sequence is shifted right by one and a start token is prepended.
    """
    n = len(sketch)
    out = np.zeros((max_len + 2, 5), dtype=float)
    out[:n, 0:2] = sketch[:, 0:2]
    out[:n, 3] = sketch[:, 2]
    out[:n, 2] = 1.0 - out[:n, 3]
    out[n:, 4] = 1
    # Shift everything right by one and insert the start token (sketch-rnn).
    out[1:] = out[:-1]
    out[0] = [0, 0, 0, 1, 0]
    return out
def scale_and_center_stroke_three(sketch, png_dimensions, padding):
    """
    Scale and translate a stroke-3 sketch IN PLACE so it is maximized in size
    and centered in an image of png_dimensions with the provided padding.
    Only the first row is translated because subsequent rows are deltas.
    :param sketch: stroke-3 numpy array of (dx, dy, pen_lift); modified in place
    :param png_dimensions: (width, height) of the target raster
    :param padding: pixels of padding reserved around the sketch
    :return: the same (mutated) sketch array
    """
    min_x, max_x, min_y, max_y = _get_bounds(sketch)
    # Narrowed from bare `except:`: only a zero-extent axis should fall back
    # to inf (so the other axis determines the scale).
    try:
        x_scale = (png_dimensions[0] - padding) / (max_x - min_x)
    except ZeroDivisionError:  # zero horizontal extent (e.g. vertical line)
        x_scale = float('inf')
    try:
        y_scale = (png_dimensions[1] - padding) / (max_y - min_y)
    except ZeroDivisionError:  # zero vertical extent
        y_scale = float('inf')
    scale = min(x_scale, y_scale)
    sketch[:, 0:2] *= scale
    # Shift only the start point: the remaining rows are relative offsets.
    sketch[0, 0:2] += np.array([(png_dimensions[0] / 2) - ((max_x + min_x) / 2) * scale,
                                (png_dimensions[1] / 2) - ((max_y + min_y) / 2) * scale])
    return sketch
def rasterize(sketch, png_dimensions):
    """
    Render a pixel-scaled stroke-3 sketch to a padded raster image array.
    :param sketch: stroke-3 array, already scaled/centered for png_dimensions
    :param png_dimensions: (width, height) of the output image
    :return: numpy uint8 image array of shape (height, width, 3)
    """
    svg_string = _get_svg_string(sketch, png_dimensions)
    rendered = Image.open(BytesIO(cairosvg.svg2png(bytestring=svg_string, scale=1.0)))
    return pad_image(rendered, png_dimensions)
def color_rasterize(sketches, png_dimensions, stroke_width=1):
    """
    Render several sketches into one image, cycling stroke colors per sketch.
    :param sketches: iterable of stroke-3 arrays
    :param png_dimensions: (width, height) of the output image
    :param stroke_width: SVG stroke width
    :return: numpy uint8 image array of shape (height, width, 3)
    """
    svg_string = _get_colored_svg_string(sketches, png_dimensions, stroke_width)
    rendered = Image.open(BytesIO(cairosvg.svg2png(bytestring=svg_string, scale=1.0)))
    return pad_image(rendered, png_dimensions)
def _get_colored_svg_string(sketches, png_dimensions, stroke_width):
    """Build an SVG document string rendering each sketch in a rotating color."""
    palette = ['black', 'red', 'blue', 'green', 'orange', 'purple']
    dwg = svgwrite.Drawing(size=png_dimensions)
    dwg.add(dwg.rect(insert=(0, 0), size=png_dimensions, fill='white'))
    op = "m"
    for idx, sketch in enumerate(sketches):
        color = palette[idx % len(palette)]
        start_x, start_y, pen_up = sketch[0, 0:3]
        path = "M%s, %s " % (start_x, start_y)
        for row_idx in range(1, len(sketch)):
            # After a pen lift the next segment is a move; otherwise a line.
            if pen_up == 1:
                op = "m"
            elif pen_up == 0:
                op = "l"
            path += op + str(float(sketch[row_idx, 0])) + ", " + str(float(sketch[row_idx, 1])) + " "
            pen_up = sketch[row_idx, 2]
        dwg.add(dwg.path(path).stroke(color, stroke_width).fill("none"))
    return dwg.tostring()
def _get_svg_string(sketch, png_dimensions):
    """Build an SVG document string for a single pixel-scaled stroke-3 sketch."""
    dwg = svgwrite.Drawing(size=png_dimensions)
    dwg.add(dwg.rect(insert=(0, 0), size=png_dimensions, fill='white'))
    start_x, start_y = sketch[0, 0:2]
    pen_up = sketch[0, 2]
    op = "m"
    path = "M%s, %s " % (start_x, start_y)
    for i in range(1, len(sketch)):
        # State machine: "m" after a pen lift, one explicit "l" to start a
        # line run, then "" (bare coordinates repeat the previous command).
        if pen_up == 1:
            op = "m"
        elif op != "l":
            op = "l"
        else:
            op = ""
        path += op + str(float(sketch[i, 0])) + ", " + str(float(sketch[i, 1])) + " "
        pen_up = sketch[i, 2]
    dwg.add(dwg.path(path).stroke("black", 1).fill("none"))
    return dwg.tostring()
def scale_and_rasterize(sketch, png_dimensions, stroke_width=1):
    """Convert an unscaled stroke-3 sketch to a padded PNG image array."""
    pad = round(min(png_dimensions) / 10.) * 2
    svg_dims, svg_string = _scale_and_get_svg_string(sketch, png_dimensions,
                                                     padding=pad, stroke_width=stroke_width)
    # Choose the tighter of the two axis scale factors so nothing is clipped.
    render_scale = min(png_dimensions[0] / svg_dims[0], png_dimensions[1] / svg_dims[1])
    rendered = Image.open(BytesIO(cairosvg.svg2png(bytestring=svg_string, scale=render_scale)))
    return pad_image(rendered, png_dimensions)
def _scale_and_get_svg_string(svg, png_dimensions, padding, stroke_width=1):
    """
    Scale a stroke-3 sketch to fit png_dimensions (minus padding), centered,
    and return its SVG native dimensions and document bytestring.
    :param svg: stroke-3 array of (dx, dy, pen_lift) rows
    :param png_dimensions: (width, height) of the target image
    :param padding: pixels reserved around the sketch
    :param stroke_width: SVG stroke width
    :return: (png_dimensions, svg document string)
    """
    min_x, max_x, min_y, max_y = _get_bounds(svg)
    # Narrowed from bare `except:`: only a zero-extent axis should fall back
    # to inf (so the other axis determines the scale).
    try:
        x_scale = (png_dimensions[0] - padding) / (max_x - min_x)
    except ZeroDivisionError:  # zero horizontal extent
        x_scale = float('inf')
    try:
        y_scale = (png_dimensions[1] - padding) / (max_y - min_y)
    except ZeroDivisionError:  # zero vertical extent
        y_scale = float('inf')
    scale = min(x_scale, y_scale)
    dims = png_dimensions
    lift_pen = 1
    color = "black"
    command = "m"
    dwg = svgwrite.Drawing(size=dims)
    dwg.add(dwg.rect(insert=(0, 0), size=dims, fill='white'))
    # Start position centers the (scaled) bounding box in the image.
    start_x = (png_dimensions[0] / 2) - ((max_x + min_x) / 2) * scale
    start_y = (png_dimensions[1] / 2) - ((max_y + min_y) / 2) * scale
    p = "M%s, %s " % (start_x, start_y)
    for i in range(len(svg)):
        # "m" after a pen lift, one explicit "l" to start a line run, then ""
        # (bare coordinates repeat the previous SVG command).
        if lift_pen == 1:
            command = "m"
        elif command != "l":
            command = "l"
        else:
            command = ""
        x = float(svg[i, 0]) * scale
        y = float(svg[i, 1]) * scale
        lift_pen = svg[i, 2]
        p += command + str(x) + ", " + str(y) + " "
    dwg.add(dwg.path(p).stroke(color, stroke_width).fill("none"))
    return dims, dwg.tostring()
def _get_bounds(svg):
"""Return bounds of data."""
min_x, max_x, min_y, max_y = float('inf'), float('-inf'), float('inf'), float('-inf')
abs_x, abs_y = 0, 0
for i in range(len(svg)):
x, y = float(svg[i, 0]), float(svg[i, 1])
abs_x += x
abs_y += y
min_x, min_y, max_x, max_y = min(min_x, abs_x), min(min_y, abs_y), max(max_x, abs_x), max(max_y, abs_y)
return min_x, max_x, min_y, max_y
def pad_image(png, png_dimensions):
    """
    Pad a rendered image onto a white canvas of png_dimensions, centering it
    along its shorter axis.
    :param png: image with .width/.height, convertible via np.array
    :param png_dimensions: (width, height) of the output canvas
    :return: numpy uint8 array of shape (height, width, 3)
    """
    w, h = png.width, png.height
    canvas = np.full((png_dimensions[1], png_dimensions[0], 3), 255, dtype=np.uint8)
    src = np.array(png, dtype=np.uint8)
    if w > h:
        # Wide image: center vertically.
        offset = int(round((w - h) / 2))
        canvas[offset: offset + h, :w] = src
    else:
        # Tall (or square) image: center horizontally.
        offset = int(round((h - w) / 2))
        canvas[:h, offset: offset + w] = src
    return canvas
def get_normalizing_scale_factor(sketches):
    """Calculate the normalizing factor explained in appendix of sketch-rnn:
    the std-dev of all (dx, dy) deltas across every sketch."""
    deltas = np.concatenate([entry[0] for entry in sketches], axis=0)[:, 0:2]
    return np.std(deltas.flatten())
def quickdraw_process(batch_data, max_seq_len, png_dims, save_path, normalizing_scale_factor,
                      gap_limit=1000, flip_x=False, flip_y=False):
    """Preprocess sketches to drop large gaps, produce sketch-5 format and generate rasterized images.

    :param batch_data: iterable of (stroke-3 sketch, class_name) pairs
    :param max_seq_len: pad length for the stroke-5 sequences
    :param png_dims: (width, height) of the rasterized output images
    :param save_path: destination of the .npz archive written at the end
    :param normalizing_scale_factor: divisor for deltas (see get_normalizing_scale_factor)
    :param gap_limit: clamp for per-step deltas, removing large pen jumps
    :param flip_x: negate the x deltas of the stroke-5 sequence
    :param flip_y: negate the y deltas of the stroke-5 sequence
    :return: 1 on completion
    """
    stroke_five_sketches = []
    rasterized_images = []
    class_names = []
    padding = round(min(png_dims)/10.) * 2
    for sketch, class_name in batch_data:
        # cast and scale
        try:
            sketch = np.array(sketch, dtype=np.float32)
            # removes large gaps from the data
            stroke_three = np.maximum(np.minimum(sketch, gap_limit), -gap_limit)
            # Centered and normalized strokes for training sequence.
            # NOTE(review): this is an ALIAS, not a copy — the in-place divide
            # below also mutates stroke_three, so np.copy() later copies the
            # normalized data. Rasterization appears unaffected because
            # scale_and_center_stroke_three rescales to fit png_dims anyway.
            stroke_three_normalized = stroke_three
            stroke_three_normalized[:, 0:2] /= normalizing_scale_factor
            stroke_five_sketch = stroke_five_format(stroke_three_normalized, max_seq_len)
            # Centered and pixel-scaled for rasterization to produce input image
            stroke_three_scaled_and_centered = scale_and_center_stroke_three(np.copy(stroke_three), png_dims, padding)
            # Flips apply to the stroke sequence only; the rasterized image is
            # NOT flipped — presumably intentional. TODO confirm.
            if flip_x:
                stroke_five_sketch[:, 0] = -stroke_five_sketch[:, 0]
            if flip_y:
                stroke_five_sketch[:, 1] = -stroke_five_sketch[:, 1]
            raster_image = rasterize(stroke_three_scaled_and_centered, png_dims)
        except:
            # Best-effort: malformed sketches / rasterization failures are skipped.
            continue
        stroke_five_sketches.append(stroke_five_sketch)
        rasterized_images.append(raster_image)
        class_names.append(class_name)
    # Image.fromarray(rasterized_images[0].astype(np.uint8)).save("rastertest.png")
    np.savez(save_path,
             stroke_five_sketches=np.array(stroke_five_sketches, dtype=np.float32),
             rasterized_images=np.array(rasterized_images, dtype=np.float32),
             class_name=np.array(class_names))
    return 1
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,380
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/datasets/base/datasets.py
|
# Registry mapping dataset name -> dataset class.
DATA_SETS = {}


def register_dataset(dataset_name):
    """
    Decorator for registering a dataset class under `dataset_name`.
    :param dataset_name: registry key for the decorated class
    :return: a decorator that records the class and returns it unchanged
    """
    def decorator(dataset_cls):
        DATA_SETS[dataset_name] = dataset_cls
        return dataset_cls
    return decorator
def get_dataset(dataset):
    """
    Return the dataset class registered under `dataset`.
    :param dataset: registry key
    :return: the registered dataset class
    :raises ValueError: if no dataset was registered under that name
    """
    if dataset in DATA_SETS:
        return DATA_SETS[dataset]
    # Bug fix: the original passed (fmt, arg) as two ValueError args, so the
    # message was never %-formatted; format it explicitly.
    raise ValueError("Dataset not found: %s" % dataset)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,381
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/subs/encoders.py
|
import tensorflow as tf
from models.subs.conv_block import ResNet12Block
class EncoderConv(tf.keras.Model):
    def __init__(self, z_size, cell_state_size, filters=(4, 4, 8, 8, 8, 8)):
        """
        Minimal pure-conv encoder used in https://arxiv.org/pdf/1709.04121.pdf
        :param z_size: dimensionality of the latent embedding
        :param cell_state_size: size of the decoder cell's initial state
        :param filters: channel counts for the conv stack (6 entries expected)
        """
        super(EncoderConv, self).__init__()
        self._z_size = z_size
        glorot_init = tf.initializers.GlorotNormal()
        # Alternating strided (downsampling) and stride-1 convolutions.
        self._conv1 = tf.keras.layers.Conv2D(filters[0], 2, (2, 2), padding="SAME", kernel_initializer=glorot_init)
        self._conv2 = tf.keras.layers.Conv2D(filters[1], 2, padding="SAME", kernel_initializer=glorot_init)
        self._conv3 = tf.keras.layers.Conv2D(filters[2], 2, (2, 2), padding="SAME", kernel_initializer=glorot_init)
        self._conv4 = tf.keras.layers.Conv2D(filters[3], 2, padding="SAME", kernel_initializer=glorot_init)
        # NOTE(review): conv5/conv6 reuse filters[2]/filters[3]; filters[4] and
        # filters[5] are never used. With the default 6-tuple the values
        # coincide (all 8s), but custom filter tuples will not behave as the
        # signature suggests — confirm intent before relying on it.
        self._conv5 = tf.keras.layers.Conv2D(filters[2], 2, (2, 2), padding="SAME", kernel_initializer=glorot_init)
        self._conv6 = tf.keras.layers.Conv2D(filters[3], 2, padding="SAME", kernel_initializer=glorot_init)
        random_init = tf.initializers.RandomNormal(0.001)
        zero_init = tf.initializers.Zeros()
        # Heads producing the Gaussian latent parameters and the RNN init state.
        self._mu = tf.keras.layers.Dense(z_size, kernel_initializer=random_init, bias_initializer=zero_init)
        self._var = tf.keras.layers.Dense(z_size, kernel_initializer=random_init, bias_initializer=zero_init)
        self._cell_init_state = tf.keras.layers.Dense(cell_state_size, activation=tf.keras.activations.tanh,
                                                      bias_initializer=zero_init, kernel_initializer=random_init)

    def call(self, inputs, training=None, **kwargs):
        """
        Encode an image batch into a sampled latent z.
        :return: (z, decoder cell init state, mu, logvar)
        """
        x = self._conv1(inputs)
        x = tf.keras.activations.relu(x)
        x = self._conv2(x)
        x = tf.keras.activations.relu(x)
        x = self._conv3(x)
        x = tf.keras.activations.relu(x)
        x = self._conv4(x)
        x = tf.keras.activations.relu(x)
        x = self._conv5(x)
        x = tf.keras.activations.relu(x)
        x = self._conv6(x)
        # tanh on the final feature map, unlike the relu used above.
        x = tf.keras.activations.tanh(x)
        x = tf.reshape(x, (inputs.shape[0], -1))
        mu = self._mu(x)
        logvar = self._var(x)
        sigma = tf.exp(logvar / 2.0)
        # Reparameterization trick: z = mu + eps * sigma, eps ~ N(0, 1).
        z = mu + tf.multiply(tf.random.normal(mu.shape), sigma)
        cell_init_state = self._cell_init_state(z)
        return z, cell_init_state, mu, logvar
class EncoderBlock4(tf.keras.Model):
    def __init__(self, z_size, cell_state_size, filters=(64, 64, 64, 64)):
        """
        Conv4 encoder from https://arxiv.org/abs/1703.03400
        :param z_size: dimensionality of the latent embedding
        :param cell_state_size: size of the decoder cell's initial state
        :param filters: channel counts for the four conv stages
        """
        super(EncoderBlock4, self).__init__()
        self._cell_state_size = cell_state_size
        self._filters = filters
        self._z_size = z_size

    def build(self, input_shape):
        """Create the four conv/batchnorm/pool stages and the latent heads."""
        conv_init = tf.initializers.GlorotNormal()
        dense_init = tf.initializers.RandomNormal(0.001)
        bias_init = tf.initializers.Zeros()
        # Attribute names are kept stable so checkpoint variable paths match.
        self._conv1 = tf.keras.layers.Conv2D(self._filters[0], 3, padding="SAME", kernel_initializer=conv_init)
        self._bnorm1 = tf.keras.layers.BatchNormalization()
        self._pool1 = tf.keras.layers.MaxPool2D()  # no trainable weights
        self._conv2 = tf.keras.layers.Conv2D(self._filters[1], 3, padding="SAME", kernel_initializer=conv_init)
        self._bnorm2 = tf.keras.layers.BatchNormalization()
        self._pool2 = tf.keras.layers.MaxPool2D()
        self._conv3 = tf.keras.layers.Conv2D(self._filters[2], 3, padding="SAME", kernel_initializer=conv_init)
        self._bnorm3 = tf.keras.layers.BatchNormalization()
        self._pool3 = tf.keras.layers.MaxPool2D()
        self._conv4 = tf.keras.layers.Conv2D(self._filters[3], 3, padding="SAME", kernel_initializer=conv_init)
        self._bnorm4 = tf.keras.layers.BatchNormalization()
        self._pool4 = tf.keras.layers.MaxPool2D()
        self._final = tf.keras.layers.GlobalAveragePooling2D()
        # Heads producing the Gaussian latent parameters and RNN init state.
        self._mu = tf.keras.layers.Dense(self._z_size, kernel_initializer=dense_init, bias_initializer=bias_init)
        self._var = tf.keras.layers.Dense(self._z_size, kernel_initializer=dense_init, bias_initializer=bias_init)
        self._cell_init_state = tf.keras.layers.Dense(self._cell_state_size, activation=tf.keras.activations.tanh,
                                                      bias_initializer=bias_init, kernel_initializer=dense_init)

    def call(self, inputs, training=None, **kwargs):
        """
        Encode an image batch into a sampled latent z.
        :return: (z, decoder cell init state, mu, logvar)
        """
        stages = ((self._conv1, self._bnorm1, self._pool1),
                  (self._conv2, self._bnorm2, self._pool2),
                  (self._conv3, self._bnorm3, self._pool3),
                  (self._conv4, self._bnorm4, self._pool4))
        h = inputs
        for conv, bnorm, pool in stages:
            h = pool(tf.keras.activations.relu(bnorm(conv(h), training=training)))
        h = self._final(h)
        mu = self._mu(h)
        logvar = self._var(h)
        sigma = tf.exp(logvar / 2.0)
        # Reparameterization trick: z = mu + eps * sigma, eps ~ N(0, 1).
        z = mu + tf.multiply(tf.random.normal(mu.shape), sigma)
        return z, self._cell_init_state(z), mu, logvar
class EncoderResnet12(tf.keras.Model):
    def __init__(self, z_size, cell_state_size, filters=(64, 128, 256, 512)):
        """
        Encoder using ResNet12.
        :param z_size: dimensionality of the latent embedding
        :param cell_state_size: size of the decoder cell's initial state
        :param filters: channel counts for the four residual blocks
        """
        super(EncoderResnet12, self).__init__()
        self._cell_state_size = cell_state_size
        self._filters = filters
        self._z_size = z_size

    def build(self, input_shape):
        """Create the four residual blocks and the latent heads."""
        dense_init = tf.initializers.RandomNormal(0.001)
        bias_init = tf.initializers.Zeros()
        # Attribute names are kept stable so checkpoint variable paths match.
        self._resnet_block1 = ResNet12Block(self._filters[0])
        self._resnet_block2 = ResNet12Block(self._filters[1])
        self._resnet_block3 = ResNet12Block(self._filters[2])
        self._resnet_block4 = ResNet12Block(self._filters[3])
        self._final = tf.keras.layers.GlobalAveragePooling2D()
        self._mu = tf.keras.layers.Dense(self._z_size, kernel_initializer=dense_init, bias_initializer=bias_init)
        self._var = tf.keras.layers.Dense(self._z_size, kernel_initializer=dense_init, bias_initializer=bias_init)
        self._cell_init_state = tf.keras.layers.Dense(self._cell_state_size, activation=tf.keras.activations.tanh,
                                                      bias_initializer=bias_init, kernel_initializer=dense_init)

    def call(self, inputs, training=None, **kwargs):
        """
        Encode an image batch into a sampled latent z.
        :return: (z, decoder cell init state, mu, logvar)
        """
        h = inputs
        for block in (self._resnet_block1, self._resnet_block2,
                      self._resnet_block3, self._resnet_block4):
            h = block(h, training=training)
        h = self._final(h)
        mu = self._mu(h)
        logvar = self._var(h)
        sigma = tf.exp(logvar / 2.0)
        # Reparameterization trick: z = mu + eps * sigma, eps ~ N(0, 1).
        z = mu + tf.multiply(tf.random.normal(mu.shape), sigma)
        return z, self._cell_init_state(z), mu, logvar
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,382
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/prepare_data.py
|
import os
import traceback
import numpy as np
import tensorflow as tf
from absl import app, flags, logging
import configs
import datasets
from util import HParams
from util import log_flags, log_hparams
# Silence TensorFlow C++ logging below ERROR level.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
FLAGS = flags.FLAGS
# Filesystem layout and run identification.
flags.DEFINE_string("dir", "/h/wangale/project/few-shot-sketch", "Project directory")
flags.DEFINE_string("data_dir", "/h/wangale/data", "Data directory")
flags.DEFINE_string("id", None, "training_id")
flags.DEFINE_string("logfile", "", "Logfile name")
flags.DEFINE_boolean("test", False, "Perform testing")
# Dataset selection and its configuration overrides.
flags.DEFINE_string("dataset", None, "Dataset used")
flags.DEFINE_string("dataset_cfgset", None, "Configuration set for the dataset")
flags.DEFINE_string("dataset_cfgs", "", "Custom configuration for the dataset configs")
flags.DEFINE_integer("random_seed", 1, "Random seed")
# These flags have no usable default and must be given on the command line.
flags.mark_flags_as_required(["id", "dataset", "dataset_cfgset"])
def prepare():
    """Instantiate the dataset selected via FLAGS and run its preparation routine."""
    cfg: HParams = configs.get_config(FLAGS.dataset_cfgset)().parse(FLAGS.dataset_cfgs)
    log_hparams(cfg)
    logging.info("Getting and preparing dataset: %s", FLAGS.dataset)
    dataset_cls = datasets.get_dataset(FLAGS.dataset)
    dataset_cls(FLAGS.data_dir, cfg).prepare(FLAGS)
def main(argv):
    """Create directories, configure logging and seeds, then run dataset prep.

    :param argv: positional args from absl (unused).
    """
    FLAGS.dir = os.path.join(FLAGS.dir, FLAGS.id)
    # exist_ok handles the re-run case: the original only created logs/ when
    # the project dir was absent, so a pre-existing dir without logs/ crashed
    # the log-file setup below. makedirs also creates FLAGS.dir itself.
    os.makedirs(os.path.join(FLAGS.dir, "logs"), exist_ok=True)

    FLAGS.alsologtostderr = True
    logging.get_absl_handler().use_absl_log_file(FLAGS.logfile, os.path.join(FLAGS.dir, "logs"))

    np.random.seed(FLAGS.random_seed)
    tf.random.set_seed(FLAGS.random_seed)

    log_flags(FLAGS)
    try:
        prepare()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
        # propagate; the full traceback is preserved in the log.
        logging.info(traceback.format_exc())


if __name__ == "__main__":
    app.run(main)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,383
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/base/model_base.py
|
import os
import tensorflow as tf
class BaseModel(object):
    """Common root for all models: resolves the on-disk directory layout
    (model dir, tfsummary dir, sampling dir) from a base dir and model id."""

    def __init__(self, base_dir, model_id):
        model_dir = os.path.join(base_dir, model_id)
        cls_name = self.__class__.__name__
        self._base_dir = base_dir
        self._dir = model_dir
        # Per-class subdirectories so subclasses don't collide on disk.
        self._summary_dir = os.path.join(model_dir, "tfsummary", cls_name)
        self._sampling_dir = os.path.join(model_dir, "sampling", cls_name)

    def train(self, *args, **kwargs):
        """Subclasses must implement their own training loop."""
        raise NotImplementedError

    def test(self, *args, **kwargs):
        """Subclasses must implement their own test routine."""
        raise NotImplementedError
class TrainableModel(BaseModel):
    """
    Iteratively trained model: adds a checkpoint directory, an optional TF
    summary writer (training runs only), and the abstract train/eval/test API.
    """

    def __init__(self, base_dir, model_id, training, ckpt=None):
        super(TrainableModel, self).__init__(base_dir, model_id)
        self.training = training
        self._ckpt = ckpt
        # ----- Directory Flags ----- #
        self._checkpoint_dir = os.path.join(self._dir, "checkpoints", self.__class__.__name__)
        # ----- Summary Writer ----- #
        # Only training runs get a writer. The original `else: None` was a
        # no-op expression that left self._writer undefined for non-training
        # instances, so any later access raised AttributeError instead of
        # allowing an explicit `is None` check.
        if self.training:
            self._writer = tf.summary.create_file_writer(self._summary_dir)
        else:
            self._writer = None
        # ----- Build Model ----- #
        self._build_model()
        # ----- Checkpoint Model ----- #
        self._checkpoint_model()

    def _build_model(self):
        """Construct the network; implemented by subclasses."""
        raise NotImplementedError

    def _checkpoint_model(self):
        """Set up checkpoint saving/restoring; implemented by subclasses."""
        raise NotImplementedError

    def train(self, train_dataset, train_steps, print_freq, save_freq, eval_dataset=None, eval_freq=None):
        raise NotImplementedError

    def evaluate(self, step, eval_dataset):
        raise NotImplementedError

    def test(self, test_dataset, result_name, steps=None):
        raise NotImplementedError

    def forward(self, *args, **kwargs):
        raise NotImplementedError

    def _write_summaries(self, step, summaries_dict):
        """Emit one scalar summary per dict entry.

        NOTE(review): assumes a tf.summary writer context (e.g. self._writer)
        is active at call time -- confirm at call sites.
        """
        for key in summaries_dict:
            tf.summary.scalar(key, summaries_dict[key], step=step)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,384
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/run_full_eval.py
|
import os
import traceback
import numpy as np
import tensorflow as tf
from absl import app, flags, logging
import models
import configs
import datasets
from models import ClassifierModel, DrawerModel
from util import HParams, interpolate
from util import log_flags
# Silence TensorFlow's C++ logging below ERROR before TF initializes.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

FLAGS = flags.FLAGS

# ----- Project paths and debugging ----- #
flags.DEFINE_string("dir", "/h/wangale/project/few-shot-sketch", "Project directory")
flags.DEFINE_string("data_dir", "/h/wangale/data", "Data directory")
flags.DEFINE_boolean("check_numerics", False, "Enable tensorflow check numerics.")

# ----- Model under evaluation ----- #
flags.DEFINE_string("id", None, "training_id")
flags.DEFINE_string("logfile", None, "Logfile name")
flags.DEFINE_string("model", None, "Model to train")
flags.DEFINE_string("model_cfgset", None, "Configuration set for the model")
flags.DEFINE_string("model_cfgs", "", "Custom configuration for the model configs")
flags.DEFINE_integer("ckpt", None, "checkpoint")

# ----- Optional external classifier used by the generation tests ----- #
flags.DEFINE_string("class_model_id", None, "Classifier model ID")
flags.DEFINE_string("class_model", None, "Model to train")
flags.DEFINE_string("class_model_cfgset", None, "Configuration set for the model")
flags.DEFINE_string("class_model_cfgs", "", "Custom configuration for the model configs")

# ----- Which evaluation stages to run ----- #
flags.DEFINE_boolean("natural", False, "Natural image model")
flags.DEFINE_boolean("sample", True, "Sample generated images")
flags.DEFINE_boolean("usfs", True, "Test unsupervised few-shot classification")
flags.DEFINE_boolean("checkpoint", True, "Test model over checkpoints")
flags.DEFINE_boolean("gen", True, "Test model generation")

flags.DEFINE_integer("random_seed", 0, "Random seed")

flags.mark_flags_as_required(["id", "logfile",
                              "model", "model_cfgset"])
def experiment():
    """Run the full evaluation sweep for a trained model, driven by FLAGS.

    Two top-level regimes: FLAGS.natural (mini-ImageNet / Sketchy) vs. the
    default sketch regime (Omniglot / QuickDraw). Within each, the stages
    FLAGS.sample (decode samples), FLAGS.usfs (unsupervised few-shot),
    FLAGS.gen (classifier-scored generation), and FLAGS.checkpoint
    (re-evaluate every saved checkpoint) run independently.
    """
    # Load the model under evaluation at the requested checkpoint.
    model_config: HParams = configs.get_config(FLAGS.model_cfgset)().parse(FLAGS.model_cfgs)
    model = models.get_model(FLAGS.model)(FLAGS.dir, FLAGS.id, model_config, training=False, ckpt=FLAGS.ckpt)

    if FLAGS.natural:
        # If natural images, use miniImageNet.
        logging.info("#######################################################")
        logging.info("#########Natural Images, using mini-ImageNet###########")
        logging.info("#######################################################")
        if FLAGS.sample:
            logging.info("=======================================================")
            logging.info("============Sampling examples from datasets============")
            logging.info("=======================================================")
            logging.info("=====Sampling Decodings of mini-ImageNet Examples...=====")
            sample_dataset_config: HParams = configs.get_config("miniimagenet/sachinravi_test")().parse("")
            gen_dataset_proto = datasets.get_dataset("miniimagenet")(FLAGS.data_dir, sample_dataset_config)
            sample_dataset = gen_dataset_proto.load(repeat=False)
            model.test(sample_dataset,
                       "full-eval-mii",
                       40,
                       generation_length=100)

            logging.info("=====Sampling Decodings of Sketchy Examples...=====")
            sample_dataset_config: HParams = configs.get_config("sketchy")().parse("split=msl100_84_noclash_noflip,shuffle=False")
            gen_dataset_proto = datasets.get_dataset("sketchy")(FLAGS.data_dir, sample_dataset_config)
            sample_dataset = gen_dataset_proto.load(repeat=False)
            model.test(sample_dataset,
                       "full-eval-sketchy",
                       20,
                       generation_length=100)
        if FLAGS.usfs:
            logging.info("=======================================================")
            logging.info("==========Unsupervised few-shot mini-ImageNet==========")
            logging.info("=======================================================")
            # Linear-readout few-shot evaluation over several n-way/k-shot setups.
            for cmodel_type in ["lr_fs"]:
                cmodel_config = configs.get_config("lr_fs")().parse("")
                cmodel = models.get_model(cmodel_type)(FLAGS.dir, FLAGS.id, cmodel_config)
                for split in ["sachinravi"]:
                    for setup in ["5way1shot", "5way5shot", "5way20shot", "5way50shot"]:
                        usfs_dataset_config = configs.get_config("miniimagenet/{}_test/{}".format(split, setup))().parse("")
                        usfs_dataset = datasets.get_dataset("miniimagenet")(FLAGS.data_dir, usfs_dataset_config)
                        logging.info("===== Running Unsupervised Few-shot | linear_head: %s | split: %s | %s ======", cmodel_type, split,
                                     setup)
                        cmodel.episode(model, usfs_dataset, 1000)
        if FLAGS.checkpoint:
            logging.info("================================================")
            logging.info("==========Performing checkpoint sweep.==========")
            logging.info("================================================")
            # Discover every saved checkpoint id by listing *.index files
            # under the first (only) model subdirectory of checkpoints/.
            ckpts_dir = os.path.join(FLAGS.dir, FLAGS.id, "checkpoints")
            ckpts = os.listdir(os.path.join(ckpts_dir, os.listdir(ckpts_dir)[0]))
            ckpts = list(filter(lambda x: x.endswith(".index"), ckpts))
            ckpt_ids = [str(y) for y in sorted(list(map(lambda x: int(x.split(".")[0].split("-")[-1]), ckpts)))]
            for ckpt_id in ckpt_ids:
                logging.info("=====Loading Model with ckpt %s=====", ckpt_id)
                # Rebuild the model from scratch at each checkpoint.
                model_config: HParams = configs.get_config(FLAGS.model_cfgset)().parse(FLAGS.model_cfgs)
                model = models.get_model(FLAGS.model)(FLAGS.dir, FLAGS.id, model_config, training=False, ckpt=ckpt_id)
                for cmodel_type in ["lr_fs"]:
                    cmodel_config = HParams().parse("")
                    cmodel = models.get_model(cmodel_type)(FLAGS.dir, FLAGS.id, cmodel_config)
                    for split in ["sachinravi"]:
                        for setup in ["5way1shot"]:
                            usfs_dataset_config = configs.get_config("miniimagenet/{}_test/{}".format(split, setup))().parse("")
                            usfs_dataset = datasets.get_dataset("miniimagenet")(FLAGS.data_dir, usfs_dataset_config)
                            logging.info("===== Running Unsupervised Few-shot | linear_head: %s | split: %s | %s ======", cmodel_type,
                                         split, setup)
                            cmodel.episode(model, usfs_dataset, 500)
    else:
        logging.info("#######################################################")
        logging.info("########## Sketches using Omniglot dataset ############")
        logging.info("#######################################################")
        ds = "fs_omniglot_28"
        if FLAGS.sample:
            logging.info("============================")
            logging.info("=====Sampling decodings=====")
            logging.info("============================")
            logging.info("=====Omniglot dataset=====")
            sample_dataset_config: HParams = configs.get_config("fs_omniglot/vinyals_test_fake")().parse("")
            gen_dataset_proto = datasets.get_dataset(ds)(FLAGS.data_dir, sample_dataset_config)
            sample_dataset = gen_dataset_proto.load(repeat=False)
            model.test(sample_dataset,
                       "full-eval-sample",
                       40)

            logging.info("=====Seen quickdraw examples=====")
            sample1_dataset_config: HParams = configs.get_config("quickdraw")().parse("split=T1_msl64_28,shuffle=False")
            gen1_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, sample1_dataset_config)
            sample1_dataset = gen1_dataset_proto.load(repeat=False)
            model.test(sample1_dataset,
                       "full-eval-qd-T1",
                       40)

            logging.info("=====Unseen quickdraw examples=====")
            sample2_dataset_config: HParams = configs.get_config("quickdraw")().parse("split=T2_msl64_28,shuffle=False")
            gen2_dataset_proto = datasets.get_dataset('quickdraw')(FLAGS.data_dir, sample2_dataset_config)
            sample2_dataset = gen2_dataset_proto.load(repeat=False)
            model.test(sample2_dataset,
                       "full-eval-qd-T2",
                       40)

            logging.info("=====Latent Space Interpolation=====")
            # Interpolation only makes sense for the sketch-decoding model.
            if isinstance(model, DrawerModel):
                interpolate(model,
                            sample1_dataset,
                            "interpolations",
                            interps=20)
        if FLAGS.gen:
            logging.info("=======================================")
            logging.info("==========Generation Testing===========")
            logging.info("=======================================")
            logging.info("===== Classifier Dataset: T1 =====")
            # Pretrained classifiers score the quality of generated sketches.
            classifier1_configs = configs.get_config("classifier/T1")().parse("")
            class_model1: ClassifierModel = models.get_model("classifier")(FLAGS.dir, "05-24_classifiers/classifier_T1",
                                                                           classifier1_configs, training=False)
            gen1_dataset_config: HParams = configs.get_config("quickdraw")().parse("split=T1_msl64_28")
            gen1_dataset_proto = datasets.get_dataset("quickdraw")(FLAGS.data_dir, gen1_dataset_config)
            gen1_dataset = gen1_dataset_proto.load(repeat=False)
            logging.info("ST1 Classifier Test")
            class_model1.classify_predictions(gen1_dataset, model, steps=20)

            logging.info("===== Classifier Dataset: T2 =====")
            classifier2_configs = configs.get_config("classifier/T2")().parse("")
            class_model2: ClassifierModel = models.get_model("classifier")(FLAGS.dir, "05-24_classifiers/classifier_T2",
                                                                           classifier2_configs, training=False)
            gen2_dataset_config: HParams = configs.get_config("quickdraw")().parse("split=T2_msl64_28")
            gen2_dataset_proto = datasets.get_dataset("quickdraw")(FLAGS.data_dir, gen2_dataset_config)
            gen2_dataset = gen2_dataset_proto.load(repeat=False)
            logging.info("ST2 Classifier Test")
            class_model2.classify_predictions(gen2_dataset, model, steps=20)
        if FLAGS.usfs:
            logging.info("==================================================")
            logging.info("==========Unsupervised few-shot Omniglot==========")
            logging.info("==================================================")
            for cmodel_type in ["lr_fs"]:
                cmodel_config = HParams().parse("")
                cmodel = models.get_model(cmodel_type)(FLAGS.dir, FLAGS.id, cmodel_config)
                for setup in ["20way1shot", "20way5shot", "5way1shot", "5way5shot"]:
                    usfs_dataset_config = configs.get_config("fs_omniglot/vinyals_test/{}".format(setup))().parse("")
                    usfs_dataset = datasets.get_dataset("fs_omniglot_vinyals")(FLAGS.data_dir, usfs_dataset_config)
                    logging.info("===== Running Unsupervised Few-shot | linear_head: %s | split: %s | %s ======", cmodel_type,
                                 "vinyals", setup)
                    cmodel.episode(model, usfs_dataset, 2000)
                    for split in ["lake"]:
                        logging.info("===Getting usfs test dataset: %s/%s===", split, setup)
                        usfs_dataset_config = configs.get_config("fs_omniglot/{}_test/{}".format(split, setup))().parse("")
                        usfs_dataset = datasets.get_dataset(ds)(FLAGS.data_dir, usfs_dataset_config)
                        logging.info("===== Running Unsupervised Few-shot | linear_head: %s | split: %s | %s ======", cmodel_type,
                                     split, setup)
                        cmodel.episode(model, usfs_dataset, 2000)
        if FLAGS.checkpoint:
            logging.info("================================================")
            logging.info("==========Performing checkpoint sweep.==========")
            logging.info("================================================")
            ckpts_dir = os.path.join(FLAGS.dir, FLAGS.id, "checkpoints")
            ckpts = os.listdir(os.path.join(ckpts_dir, os.listdir(ckpts_dir)[0]))
            ckpts = list(filter(lambda x: x.endswith(".index"), ckpts))
            ckpt_ids = sorted(list(map(lambda x: int(x.split(".")[0].split("-")[-1]), ckpts)))
            for ckpt_id in ckpt_ids:
                ckpt_id = str(ckpt_id)
                logging.info("=====Loading Model with ckpt %s=====", ckpt_id)
                ckpt_model_config: HParams = configs.get_config(FLAGS.model_cfgset)().parse(FLAGS.model_cfgs)
                ckpt_model = models.get_model(FLAGS.model)(FLAGS.dir, FLAGS.id, ckpt_model_config, training=False, ckpt=ckpt_id)
                for cmodel_type in ["lr_fs"]:
                    cmodel_config = HParams().parse("")
                    cmodel = models.get_model(cmodel_type)(FLAGS.dir, FLAGS.id, cmodel_config)
                    for setup in ["20way1shot"]:
                        usfs_dataset_config = configs.get_config("fs_omniglot/vinyals_test/{}".format(setup))().parse("")
                        usfs_dataset = datasets.get_dataset("fs_omniglot_vinyals")(FLAGS.data_dir, usfs_dataset_config)
                        logging.info("===== Running Unsupervised Few-shot | linear_head: %s | split: %s | %s ======", cmodel_type,
                                     "Vinyals", setup)
                        cmodel.episode(ckpt_model, usfs_dataset, 500)
                if FLAGS.gen:
                    # NOTE(review): class_model1/2 and gen1/2_dataset are only
                    # defined when FLAGS.gen was true above; running with
                    # checkpoint=True and gen toggled mid-branch would raise
                    # NameError -- confirm intended coupling.
                    logging.info("==========Generation Testing===========")
                    logging.info("ST1")
                    class_model1.classify_predictions(gen1_dataset, ckpt_model, steps=5)
                    logging.info("ST2")
                    class_model2.classify_predictions(gen2_dataset, ckpt_model, steps=5)
def main(argv):
    """Create directories, configure logging/seeds, then run the evaluation.

    :param argv: positional args from absl (unused).
    """
    # Setup Directory
    experiment_dir = os.path.join(FLAGS.dir, FLAGS.id)
    # exist_ok covers both a fresh run and a re-run: the original skipped
    # directory creation entirely whenever experiment_dir already existed,
    # crashing log-file setup if logs/ was missing.
    os.makedirs(os.path.join(experiment_dir, "logs"), exist_ok=True)

    # Setup Logging
    FLAGS.alsologtostderr = True
    logging.get_absl_handler().use_absl_log_file(FLAGS.logfile, os.path.join(experiment_dir, "logs"))

    # Setup seeds. NOTE(review): a seed of 0 (the flag default) leaves the
    # RNGs unseeded because of the truthiness check -- confirm intended.
    if FLAGS.random_seed:
        np.random.seed(FLAGS.random_seed)
        tf.random.set_seed(FLAGS.random_seed)

    # Log Flags
    log_flags(FLAGS)
    try:
        experiment()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
        # propagate; the full traceback is preserved in the log.
        logging.info(traceback.format_exc())


if __name__ == "__main__":
    app.run(main)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,385
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/__init__.py
|
from .drawer import *
from .drawer_enc_block import *
from .drawer_enc_resnet12 import *
from .classifier import *
from .vae import *
from .vae_enc_block import *
from .base import *
from .lr_fs import *
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,386
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/datasets/miniimagenet.py
|
import os
import pickle as pkl
import numpy as np
from PIL import Image
from absl import logging
from datasets import register_dataset, DatasetBase, DatasetEpisodic
@register_dataset("miniimagenet")
class MiniImageNet(DatasetEpisodic):
    """Episodic mini-ImageNet dataset backed by per-class .npz caches
    produced by :meth:`prepare` from the raw pickle files."""

    def __init__(self, data_dir, params):
        super(MiniImageNet, self).__init__(data_dir, params)
        self._mode = params.mode
        self.way = params.way
        self.shot = params.shot
        self._dataset_path = os.path.join(self._data_dir, 'miniimagenet')

    def load(self, repeat=True):
        """Load cached class files, defaulting the split to every cache file.

        :param repeat: whether the underlying dataset should repeat.
        :return: whatever the shared episodic/batch loader produces.
        """
        data_path = os.path.join(self._dataset_path, 'caches')
        if not self._split:
            # Fixed: original had a redundant duplicated assignment
            # (`self._split = self._split = ...`).
            self._split = ','.join(sorted(os.listdir(data_path)))
        return self._load_episodic_or_batch(data_path, repeat)

    def _filter_collections(self, files):
        """
        :param files: {"image", "class_dict"}
        :return: ["image", "class_name"]
        """
        # sorted() puts "class_name" before "image", so [1] is the image
        # collection and [0] the class-name collection.
        files_sorted = sorted(files)
        return files_sorted[1], files_sorted[0]

    def prepare(self, FLAGS, png_dims=(84, 84), padding=None):
        """
        Preparation function for miniImageNet.

        Reads every raw pickle under <dataset>/raw, resizes each image to
        png_dims, and writes one <imagenet_class_id>.npz cache per class with
        float32 "image" arrays and matching "class_name" entries.

        :param FLAGS: absl FLAGS (kept for interface parity; unused here)
        :param png_dims: output (width, height) of the resized images
        :param padding: defaults to ~20% of the smaller dimension; only logged
        :return: None
        """
        padding = padding if padding else round(min(png_dims) / 10.0) * 2
        save_dir = os.path.join(self._dataset_path, "caches")
        raw_dir = os.path.join(self._dataset_path, "raw")
        os.makedirs(save_dir, exist_ok=True)
        logging.info("Processing MiniImageNet | png_dimensions: %s | padding: %s", png_dims, padding)
        total_count = 0
        for pkl_file in [os.path.join(raw_dir, file) for file in os.listdir(raw_dir)]:
            with open(pkl_file, 'rb') as file:
                # latin1 encoding is required to unpickle the py2-era dumps.
                pkl_dict = pkl.load(file, encoding='latin1')
            img_data, class_dict = pkl_dict["image_data"], pkl_dict["class_dict"]
            for imgnet_class_id in class_dict.keys():
                logging.info("Processing Imagenet ID: %s", imgnet_class_id)
                accumulate = {"image": [], "class_name": []}
                char_save_path = os.path.join(save_dir, imgnet_class_id + ".npz")
                per_class_count = 0
                for idx in class_dict[imgnet_class_id]:
                    image: Image.Image = Image.fromarray(img_data[idx])
                    image = image.resize(size=png_dims)
                    accumulate['image'].append(np.array(image, dtype=np.float32))
                    accumulate['class_name'].append(imgnet_class_id)
                    per_class_count += 1
                logging.info("Per Class Count: %s", per_class_count)
                total_count += per_class_count
                np.savez(char_save_path, **accumulate)
        logging.info("Processing done, total count: %s", total_count)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,387
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/util/write_routines.py
|
import os
from time import time
import numpy as np
from PIL import Image
from absl import logging
from util import stroke_three_format, scale_and_rasterize, stroke_three_format_centered, rasterize
def parallel_writer_sketches(path, queue, shard_size=1, cumul=False, png_dims=(28, 28)):
    """Consumer loop that rasterizes sketch predictions pulled from ``queue``
    and writes them to ``path`` as .jpg files.

    Runs until a falsy sentinel is read from the queue. Every ``shard_size``-th
    entry is rendered. Entries are dicts holding at least "stroke_predictions"
    and "class_names", plus "stroke_five_sketches" / "rasterized_images" for the
    ground truth.

    :param path: output directory; created if missing.
    :param queue: multiprocessing queue of entry dicts; a falsy item terminates.
    :param shard_size: only every shard_size-th entry is written to disk.
    :param cumul: additionally write per-stroke cumulative renderings.
    :param png_dims: (height, width) of the rasterized output images.
    """
    logging.info("Archiving latent outputs to: %s", path)
    sample_path = path
    os.makedirs(sample_path, exist_ok=True)
    count = 0
    while True:
        entry = queue.get()
        if not entry:
            # Falsy sentinel terminates the writer process.
            break
        count += 1
        if count and count % shard_size == 0:
            stroke_three_gt = None
            if len(entry['stroke_five_sketches']) > 2:
                stroke_three_gt = stroke_three_format(entry['stroke_five_sketches'])
                # NOTE(review): sequence lengths 65/101 appear to distinguish dataset
                # variants (max_seq_len 64 vs 100 plus the initial point) — confirm
                # against the sampling routines that fill the queue.
                if len(entry["stroke_five_sketches"]) == 65:
                    entry["rasterized_images"] = scale_and_rasterize(stroke_three_gt, png_dims, 2).astype("float32")
                elif len(entry['stroke_five_sketches']) == 101:
                    np_rasterized_gt_strokes = scale_and_rasterize(stroke_three_gt, png_dims, 2).astype('uint8')
                    rasterized_gt_strokes = Image.fromarray(np_rasterized_gt_strokes)
                    rasterized_gt_strokes.save(os.path.join(sample_path, "{}-{}_y_raster.jpg".format(entry["class_names"].decode("utf-8"), count)))
            stroke_three = stroke_three_format(entry["stroke_predictions"])
            entry["rasterized_predictions"] = scale_and_rasterize(stroke_three, png_dims, stroke_width=2).astype("float32")
            # Side-by-side ground truth / prediction image, plus each separately.
            np_image = np.concatenate((entry["rasterized_images"], entry["rasterized_predictions"]))
            img = Image.fromarray(np_image.astype("uint8"))
            gt_image = entry["rasterized_images"].astype("uint8")
            predicted_image = entry["rasterized_predictions"].astype("uint8")
            gt_img = Image.fromarray(gt_image.astype("uint8"))
            pt_img = Image.fromarray(predicted_image.astype('uint8'))
            try:
                img.save(os.path.join(sample_path, "{}-{}.jpg".format(entry["class_names"].decode("utf-8"), count)))
                gt_img.save(os.path.join(sample_path, "{}-{}_x.jpg".format(entry["class_names"].decode("utf-8"), count)))
                pt_img.save(os.path.join(sample_path, "{}-{}_predicted.jpg".format(entry["class_names"].decode("utf-8"), count)))
            except AttributeError:
                # class_names is already a str (no .decode); save without decoding.
                # Was a bare except, which also hid genuine I/O errors.
                gt_img.save(os.path.join(sample_path, "{}-{}_x.jpg".format(entry["class_names"], count)))
                pt_img.save(os.path.join(sample_path, "{}-{}_predicted.jpg".format(entry["class_names"], count)))
                img.save(os.path.join(sample_path, "{}-{}.jpg".format(entry["class_names"], count)))
            if cumul:
                # Render the sketch stroke-by-stroke: for each prefix length i,
                # lift the pen (pen-state 1) for every stroke from i onward.
                if stroke_three_gt is not None:
                    source_sample_dir = os.path.join(sample_path, "cumulative", "source", "{}-{}".format(entry['class_names'], count))
                    os.makedirs(source_sample_dir, exist_ok=True)
                    pen_strokes = np.copy(stroke_three_gt[:, 2])
                    for i in range(1, len(stroke_three_gt)):
                        copy_pen_strokes = np.copy(pen_strokes)
                        copy_pen_strokes[i:] = np.ones((len(stroke_three_gt) - i,))
                        stroke_three_gt[:, 2] = copy_pen_strokes
                        cumul_img = scale_and_rasterize(stroke_three_gt, png_dims, stroke_width=3).astype("float32")
                        cumul_img = Image.fromarray(cumul_img.astype('uint8'))
                        cumul_img.save(os.path.join(source_sample_dir, "gt_{}.jpg".format(i)))
                cum_sample_dir = os.path.join(sample_path, "cumulative", "predict", "{}-{}".format(entry['class_names'], count))
                os.makedirs(cum_sample_dir, exist_ok=True)
                pen_strokes = np.copy(stroke_three[:, 2])
                for i in range(1, len(stroke_three)):
                    copy_pen_strokes = np.copy(pen_strokes)
                    copy_pen_strokes[i:] = np.ones((len(stroke_three) - i,))
                    stroke_three[:, 2] = copy_pen_strokes
                    cumul_img = scale_and_rasterize(stroke_three, png_dims, stroke_width=3).astype("float32")
                    cumul_img = Image.fromarray(cumul_img.astype('uint8'))
                    cumul_img.save(os.path.join(cum_sample_dir, "pred_{}.jpg".format(i)))
def parallel_writer_vae_latent(path, queue, shard_size=1):
    """Consumer loop that writes VAE reconstructions pulled from ``queue``
    to ``path`` as .jpg files.

    Runs until a falsy sentinel is read from the queue. Every ``shard_size``-th
    entry is written; entries are dicts with "rasterized_images",
    "reconstructed_images" (float images in [0, 1]) and "class_names".

    :param path: output directory; created if missing.
    :param queue: multiprocessing queue of entry dicts; a falsy item terminates.
    :param shard_size: only every shard_size-th entry is written to disk.
    """
    logging.info("Archiving vae latent outputs to %s", path)
    sample_path = path
    os.makedirs(sample_path, exist_ok=True)
    start_time = last_time = time()
    count = 0
    while True:
        entry = queue.get()
        if not entry:
            # Falsy sentinel terminates the writer process.
            break
        count += 1
        if count and count % shard_size == 0:
            # Stack input above reconstruction and rescale [0, 1] -> [0, 255].
            np_image = np.concatenate((entry["rasterized_images"], entry["reconstructed_images"]))
            np_image = np_image.squeeze() * 255.0
            img = Image.fromarray(np_image.astype("uint8"))
            try:
                img.save(os.path.join(sample_path, "{}-{}.jpg".format(entry["class_names"].decode("utf-8"), count)))
            except AttributeError:
                # class_names is already a str (no .decode); save without decoding.
                # Was a bare except, which also hid genuine I/O errors.
                img.save(os.path.join(sample_path, "{}-{}.jpg".format(entry["class_names"], count)))
        curr_time = time()
        if count and count % 1000 == 0:
            logging.info("Samples complete: %6d | Time/Sample: %5.4f | Total Elapsed Time: %7d",
                         count, (curr_time - last_time) / shard_size, curr_time - start_time)
            last_time = curr_time
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,388
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/classifier.py
|
import os
from time import time
import tensorflow as tf
import numpy as np
from absl import logging
from .drawer import DrawerModel
from .vae import VAE
from models.base import TrainableModel, register_model
from util import stroke_three_format, scale_and_center_stroke_three, rasterize
@register_model('classifier')
class ClassifierModel(TrainableModel):
    """ResNet-101 image classifier.

    Used both as a standalone trainable classifier and as an evaluator
    ("classifier score") for images produced by DrawerModel / VAE via
    :meth:`classify_predictions`.
    """

    def __init__(self, base_dir, model_id, params, training=True):
        """
        Initializes Resnet-101 classifier model.

        :param base_dir: root directory for checkpoints/summaries.
        :param model_id: identifier for this training run.
        :param params: hyper-parameters; must provide class_list (comma-separated),
                       png_dims, weights, lr and lr_schedule.
        :param training: whether the model is restored for training.
        """
        if not params.class_list:
            logging.fatal("No categories defined, class_list is empty.")
        # ----- Model Parameters ----- #
        self._class_list = params.class_list.split(",")
        self._png_dims = params.png_dims
        self._num_classes = len(self._class_list)
        self._weights = params.weights
        # ----- Training Parameters ----- #
        self._lr = params.lr
        self._lr_schedule: dict = params.lr_schedule
        super(ClassifierModel, self).__init__(base_dir, model_id, training)

    def _build_model(self):
        """Build the class-name lookup table, the ResNet-101 head and the optimizer."""
        # Maps class-name strings to integer ids; unknown names fall into one OOV bucket.
        self._class_lookup = tf.lookup.StaticVocabularyTable(
            initializer=tf.lookup.KeyValueTensorInitializer(
                keys=self._class_list,
                values=tf.range(tf.size(self._class_list, out_type=tf.int64), dtype=tf.int64)),
            num_oov_buckets=1)
        resnet = tf.keras.applications.ResNet101(include_top=False, weights=self._weights, input_shape=(self._png_dims, self._png_dims, 3))
        self._model = tf.keras.Sequential([resnet,
                                           tf.keras.layers.Flatten(),
                                           tf.keras.layers.Dense(self._num_classes)])
        # Step-wise LR decay: starts at self._lr, switches at the boundaries in lr_schedule.
        lr = tf.keras.optimizers.schedules.PiecewiseConstantDecay(list(self._lr_schedule.keys()),
                                                                  [self._lr] + list(self._lr_schedule.values()))
        self._optimizer = tf.optimizers.Adam(learning_rate=lr)

    def _checkpoint_model(self):
        """Set up checkpointing and restore the latest checkpoint if one exists."""
        ckpt = tf.train.Checkpoint(optimizer=self._optimizer,
                                   model=self._model)
        self._ckpt_manager = tf.train.CheckpointManager(ckpt, self._checkpoint_dir, max_to_keep=None)
        if self._ckpt_manager.latest_checkpoint:
            logging.info("Restoring Checkpoint: %s", self._ckpt_manager.latest_checkpoint)
            status = ckpt.restore(self._ckpt_manager.latest_checkpoint)
            if self.training:
                status.assert_existing_objects_matched()
            else:
                # Inference-only restore: optimizer slots may be absent.
                status.expect_partial()

    def train(self, train_dataset, train_steps, print_freq, save_freq, eval_dataset=None, eval_freq=None):
        """Run the training loop, with periodic logging, checkpointing and evaluation.

        :param train_dataset: (tf.data.Dataset, unused) pair yielding
                              (a, b, image, class_str) tuples.
        :param train_steps: total number of optimizer steps to reach.
        :param print_freq: steps between console/summary logs.
        :param save_freq: steps between checkpoints.
        :param eval_dataset: optional dataset pair for periodic evaluation.
        :param eval_freq: steps between evaluations; defaults to save_freq.
        """
        if eval_dataset and not eval_freq:
            eval_freq = save_freq
        train_dataset, _ = train_dataset  # The second element is a saveable file-based dataset, currently not used
        train_dataset = train_dataset.map(lambda a, b, image, class_str: (a, b,
                                                                          tf.image.resize(image, (self._png_dims, self._png_dims)),
                                                                          class_str))
        train_iter = train_dataset.__iter__()
        last_time = start_time = time()
        # Resume counting from the optimizer's persisted iteration count.
        for step in tf.range(self._optimizer.iterations + 1, tf.constant(train_steps + 1)):
            x_image, class_name = next(train_iter)[2:4]
            class_name = tf.cast(tf.one_hot(self._class_lookup.lookup(class_name), depth=self._num_classes), dtype=tf.float32)
            loss, accuracy = self.train_step(x_image, class_name)
            if step and step % print_freq == 0:
                curr_time = time()
                logging.info("Step: %6d | Loss: %.5f | Accuracy: %.4f | LR: %.5f | time/step: %.4f | Total Time: %7d",
                             step, loss, accuracy, self._optimizer._decayed_lr('float32').numpy(),
                             (curr_time-last_time)/print_freq, curr_time-start_time)
                last_time = curr_time
                with self._writer.as_default():
                    self._write_summaries(step, {"lr": self._optimizer._decayed_lr('float32'),
                                                 "loss": loss, "accuracy": accuracy})
            if step and step % save_freq == 0:
                self._ckpt_manager.save(step)
            if eval_dataset and step and step % eval_freq == 0:
                self.evaluate(step, eval_dataset)

    def evaluate(self, step, eval_dataset):
        """Evaluate mean loss/accuracy over eval_dataset and log summaries at ``step``.

        :param step: global training step used to tag the eval summaries.
        :param eval_dataset: (tf.data.Dataset, unused) pair, same format as training.
        """
        eval_dataset, _ = eval_dataset
        loss_mean, acc_mean = tf.keras.metrics.Mean(), tf.keras.metrics.Mean()
        eval_start_time = time()
        eval_dataset = eval_dataset.map(lambda a, b, image, class_str: (a, b,
                                                                        tf.image.resize(image, (self._png_dims, self._png_dims)),
                                                                        class_str), deterministic=False)
        # Loop variable renamed from `step`: it previously shadowed the parameter,
        # so summaries were tagged with the batch index instead of the global step.
        for _batch_idx, entry in enumerate(eval_dataset.__iter__()):
            x_image, class_name = entry[2:4]
            class_name = tf.cast(tf.one_hot(self._class_lookup.lookup(class_name), depth=self._num_classes), dtype=tf.float32)
            logits = self.forward(x_image, training=False)
            loss_batched = tf.nn.softmax_cross_entropy_with_logits(class_name, logits)
            loss = tf.reduce_mean(loss_batched)
            accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(class_name, axis=1), tf.argmax(logits, axis=1)), dtype=tf.float32))
            loss_mean(loss)
            acc_mean(accuracy)
        last_time = time()
        eval_loss, eval_acc = loss_mean.result(), acc_mean.result()
        with self._writer.as_default():
            self._write_summaries(step, {"lr": self._optimizer._decayed_lr('float32'),
                                         "eval_loss": eval_loss, "eval_acc": eval_acc})
        logging.info("Eval Done | Loss: %.5f | Accuracy: %.4f Eval Time: %.4f",
                     eval_loss, eval_acc, last_time - eval_start_time)

    def test(self, test_dataset, result_name, steps=None):
        """Evaluate on test_dataset, appending cumulative metrics to a result log.

        :param test_dataset: (tf.data.Dataset, unused) pair, same format as training.
        :param result_name: subdirectory name under the sampling dir for the log.
        :param steps: optional cap on the number of batches.
        """
        logging.info("Beginning testing loop")
        sampling_dir = os.path.join(self._sampling_dir, result_name)
        os.makedirs(sampling_dir)
        # Context manager ensures the log file is flushed/closed (was leaked before).
        with open(os.path.join(sampling_dir, 'result_log.txt'), 'a') as write_file:
            test_dataset, _ = test_dataset
            loss_mean, acc_mean = tf.keras.metrics.Mean(), tf.keras.metrics.Mean()
            for step, entry in enumerate(test_dataset):
                if step == steps:
                    break
                x_image, class_name = entry[2:4]
                class_name = tf.cast(tf.one_hot(self._class_lookup.lookup(class_name), depth=self._num_classes), dtype=tf.float32)
                x_image = tf.cast(x_image, dtype=tf.float32)
                logits = self.forward(x_image, training=False)
                loss_batched = tf.nn.softmax_cross_entropy_with_logits(class_name, logits)
                loss = tf.reduce_mean(loss_batched)
                accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(class_name, axis=1), tf.argmax(logits, axis=1)), dtype=tf.float32))
                loss_mean(loss)
                acc_mean(accuracy)
                logging.info('cumulative_loss: %s | cumulative_acc: %s', loss_mean.result(), acc_mean.result())
                write_file.write('cumulative_loss: {} | cumulative_acc: {} \n'.format(loss_mean.result(), acc_mean.result()))

    def classify_predictions(self, dataset, model, steps):
        """
        Samples examples from the model over the dataset.
        Sampled examples are then classified by the ResNet101 classifier.

        :param dataset: (tf.data.Dataset, unused) pair yielding (a, b, image, class_str).
        :param model: generative model to sample from (DrawerModel or VAE).
        :param steps: number of batches to evaluate (loop breaks when step > steps,
                      so steps + 1 batches are processed).
        :return: None; final accuracy statistics are logged.
        """
        dataset, _ = dataset
        loss_mean, acc_mean = tf.keras.metrics.Mean(), tf.keras.metrics.Mean()
        acc_list = []
        padding = round(self._png_dims / 10.) * 2
        for step, entry in enumerate(dataset):
            if step > steps:
                break
            x_image, class_name = entry[2:4]
            if isinstance(model, DrawerModel):
                # Seed the decoder with a pen-down start token, generate strokes,
                # then rasterize each predicted sketch for classification.
                input_strokes = tf.tile(tf.constant([[[0., 0., 1., 0., 0.]]]), (x_image.shape[0], 2, 1))
                predicted_strokes = model.forward(input_strokes, x_image, training=False, generation_length=64)[3]
                image_inputs = []
                for predicted_stroke in predicted_strokes.numpy():
                    stroke_three = stroke_three_format(predicted_stroke)
                    stroke_three_scaled_and_centered = scale_and_center_stroke_three(stroke_three, [self._png_dims] * 2, padding)
                    image_inputs.append(rasterize(stroke_three_scaled_and_centered, [self._png_dims] * 2))
                image_inputs = np.array(image_inputs, dtype=np.float32)
                image_inputs = tf.image.resize(image_inputs, (self._png_dims, self._png_dims))
            elif isinstance(model, VAE):
                image_inputs = model.forward(x_image, training=False)[0] * 255.0
            else:
                # Previously fell through to a NameError on image_inputs.
                raise ValueError("Unsupported model type for classify_predictions: {}".format(type(model).__name__))
            logits = self.forward(image_inputs, training=False)
            y_labels = tf.cast(tf.one_hot(self._class_lookup.lookup(class_name), depth=self._num_classes), dtype=tf.float32)
            loss_batched = tf.nn.softmax_cross_entropy_with_logits(y_labels, logits)
            loss = tf.reduce_mean(loss_batched)
            accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(y_labels, axis=1), tf.argmax(logits, axis=1)), dtype=tf.float32))
            loss_mean(loss)
            acc_mean(accuracy)
            acc_list.append(accuracy)
            if step and step % 5 == 0:
                logging.info('cumulative_loss: %s | cumulative_acc: %s', loss_mean.result().numpy(), acc_mean.result().numpy())
        logging.info("Final Result | Mean Accuracy: %.4f | Std: %.4f | Var: %.4f | p95: %.4f",
                     acc_mean.result(), np.std(acc_list), np.var(acc_list), 1.96 * np.std(acc_list) / np.sqrt(len(acc_list)))

    @tf.function
    def forward(self, x_image, training):
        """Run the classifier and return unnormalized logits.

        The ``training`` flag is now forwarded to the Keras model (it was
        hard-coded to True, which left BatchNorm in training mode during
        evaluation/testing).
        """
        logits = self._model(x_image, training=training)
        return logits

    @tf.function
    def train_step(self, x_image, class_name):
        """Single optimization step; returns (mean loss, batch accuracy)."""
        with tf.GradientTape() as tape:
            logits = self.forward(x_image, training=True)
            loss_batched = tf.nn.softmax_cross_entropy_with_logits(class_name, logits)
            loss = tf.reduce_mean(loss_batched)
        accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(class_name, axis=1), tf.argmax(logits, axis=1)), dtype=tf.float32))
        grads = tape.gradient(loss, self._model.trainable_variables)
        self._optimizer.apply_gradients(zip(grads, self._model.trainable_variables))
        return loss, accuracy
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,389
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/datasets/base/dataset_episodic.py
|
import os
import traceback
import random
import tensorflow as tf
import numpy as np
from absl import logging
from .dataset_base import DatasetBase
class DatasetEpisodic(DatasetBase):
    """Base class for datasets that can be served either as a flat batch stream
    or as few-shot episodes (shot x way support/query splits) assembled from
    per-class .npz shards on disk."""

    def __init__(self, data_dir, params):
        super(DatasetEpisodic, self).__init__(data_dir, params)

    def _load_episodic_or_batch(self, data_path, repeat):
        """Dispatch dataset construction on ``self._mode``.

        In "batch" mode ``self._split`` is a comma-separated list of shard files
        or directories under ``data_path`` that feed one flat dataset. In
        "episodic" mode ``self._split`` is a ';'-separated list of episode
        specs, each itself a comma-separated list of files/directories; one
        episode is sampled from each spec.

        :param data_path: root directory holding the cached shards.
        :param repeat: whether the resulting tf.data.Dataset repeats indefinitely.
        """
        if self._mode == "batch":
            # If not episodic, i.e. conventional loading
            files = []
            for alphabet in self._split.split(","):
                path = os.path.join(data_path, alphabet)
                if os.path.isdir(path):
                    # Directory entries are expanded into their contained shard files.
                    file_list = os.listdir(os.path.join(data_path, alphabet))
                    files.extend([os.path.join(data_path, alphabet, c) for c in file_list])
                else:
                    files.append(path)
            return self._create_dataset_from_filepaths(files, repeat)
        elif self._mode == "episodic":
            # Each episode is encoded as one comma-joined string of file paths so it
            # can be passed through tf.data.Dataset.from_generator's args.
            episodes = []
            for episode_string in self._split.split(";"):
                files = []
                for collection in episode_string.split(","):
                    path = os.path.join(data_path, collection)
                    if os.path.isdir(path):
                        file_list = os.listdir(os.path.join(data_path, collection))
                        files.extend([os.path.join(data_path, collection, file) for file in file_list])
                    else:
                        files.append(path)
                episodes.append(",".join(files))
            return self._create_episodic_dataset_from_nested_filespaths(episodes, self.shot, self.way, repeat)
        else:
            # absl logging.fatal aborts the process; no fallthrough.
            logging.fatal("Dataset mode not \"episodic\" or \"batch\", value supplied: %s", self._mode)

    def _create_episodic_dataset_from_nested_filespaths(self, episodes, shot, way, repeat):
        """Build an episodic tf.data.Dataset from encoded episode strings.

        Peeks at the first shard of the first episode to derive the generator's
        output dtypes (duplicated once for support and once for query).

        :param episodes: list of comma-joined shard-path strings, one per episode.
        :param shot: number of support examples per class.
        :param way: number of classes per episode.
        :param repeat: whether the dataset repeats indefinitely.
        """
        try:
            npz = np.load(episodes[0].split(",")[0], allow_pickle=True, encoding='latin1')
            npz_collections = self._filter_collections(npz.files)
            # Dtypes are listed once for the support tensors and once for the query
            # tensors, matching the generator's yielded tuple.
            types = tuple([tf.as_dtype(npz[key].dtype) for key in npz_collections] * 2)
        except Exception as e:
            logging.error("%s file load unsuccessful from %s \n %s", type(self).__name__, episodes, str(e))
            logging.info(traceback.format_exc())
            raise e
        dataset = tf.data.Dataset.from_generator(self._make_episode_generator,
                                                 args=(episodes, shot, way, npz_collections),
                                                 output_types=types)
        if self._augmentations:
            # Fan each episode out through the augmentation generator in parallel.
            dataset = dataset.interleave(lambda *args: tf.data.Dataset.from_generator(self._apply_augmentations_generator,
                                                                                      args=args,
                                                                                      output_types=types),
                                         num_parallel_calls=self._num_parallel_calls,
                                         block_length=self._block_length,
                                         cycle_length=self._cycle_length)
        if self._shuffle:
            dataset = dataset.shuffle(self._buff_size * self._batch_size)
        if repeat:
            dataset = dataset.repeat()
        dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)
        return dataset

    def _make_episode_generator(self, episodes, shot, way, npz_collections):
        """Yield one episode per spec: a tuple of concatenated support collections
        followed by the matching query collections.

        Arguments arrive as bytes because tf.data passes ``args`` through
        tensors, hence the utf-8 decoding below.
        """
        for episode_classes_string in episodes:
            episode_classes_list = episode_classes_string.decode('utf-8').split(",")
            # Sample `way` class shards for this episode.
            episode_classes = random.sample(list(episode_classes_list), way)
            support = [[] for _ in range(len(npz_collections))]
            query = [[] for _ in range(len(npz_collections))]
            for class_file in episode_classes:
                try:
                    npz = np.load(class_file, allow_pickle=True, encoding='latin1')
                except FileNotFoundError as error:
                    logging.fatal("Shard not found when producing generator fn: %s", class_file)
                    raise error
                collections = [npz[key.decode('utf-8')] for key in npz_collections]
                # Equivalent to a shuffled arange(len); first `shot` indices become
                # the support set, the remainder the query set.
                sample_idxs = np.linspace(0., float(len(collections[0])-1), len(collections[0])).astype(np.int32)
                np.random.shuffle(sample_idxs)
                for idx, collection in enumerate(collections):
                    support[idx].append(collection[sample_idxs[:shot]])
                for idx, collection in enumerate(collections):
                    query[idx].append(collection[sample_idxs[shot:]])
            yield tuple([np.concatenate(x, axis=0) for x in support] + [np.concatenate(x, axis=0) for x in query])
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,390
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/datasets/sketchy.py
|
import os
from concurrent.futures.process import ProcessPoolExecutor
from time import time
from itertools import repeat
from math import ceil
import numpy as np
import psutil
import csv
import requests
import gc
from PIL import Image
from absl import logging
from svgpathtools import svg2paths
from datasets.base import register_dataset, DatasetBase
from util import sketch_process, miniimagenet_test, svg_to_stroke_three, get_normalizing_scale_factor, sketchy_train_list, \
sketchy_val_list
@register_dataset("sketchy")
class SketchyDataset(DatasetBase):
def __init__(self, data_dir, params):
super(SketchyDataset, self).__init__(data_dir, params)
self._dataset_path = os.path.join(self._data_dir, "sketchy")
def load(self, repeat=True):
data_path = os.path.join(self._dataset_path, 'caches', self._split)
files = [os.path.join(data_path, shard_name) for shard_name in os.listdir(data_path)]
return self._create_dataset_from_filepaths(files, repeat)
def _filter_collections(self, files):
"""
files = ['natural_image', 'strokes', 'rasterized_strokes', 'imagenet_id', 'sketch_id']
:param files:
:return: strokes_gt, strokes_teacher, natural_image, class_name, rasterized_strokes
"""
return files[1], files[1], files[0], files[3], files[2]
def prepare(self, FLAGS, epsilon=5.0, max_seq_len=100, png_dims=(84, 84), padding=None, shard_size=1000,
exclusive_set_parents=miniimagenet_test,
class_list=sketchy_train_list, flip_x=True, flip_y=False):
"""
Parallelized processing function that converts .svg files into our model dataset. Normalizes and deduplicates from mini-ImageNet.
:param FLAGS:
:param epsilon:
:param max_seq_len:
:param png_dims:
:param padding:
:param shard_size:
:param exclusive_set_parents:
:param class_list:
:param flip_x:
:param flip_y:
:return:
"""
padding = padding if padding else round(min(png_dims) / 10.0) * 2
save_dir = os.path.join(self._dataset_path, "caches", self._split)
sample_dir = os.path.join(self._dataset_path, "processing-samples", self._split)
raw_dir = os.path.join(self._dataset_path, "raw")
os.makedirs(save_dir, exist_ok=True)
os.makedirs(sample_dir, exist_ok=True)
raw_info_dir = os.path.join(raw_dir, "info-06-04", "info")
raw_photo_dir = os.path.join(raw_dir, "rendered_256x256", "256x256", "photo", "tx_000000000000")
raw_sketch_dir = os.path.join(raw_dir, "rendered_256x256", "256x256", "sketch", "tx_000100000000")
raw_svg_dir = os.path.join(raw_dir, "sketches-06-04", "sketches")
logging.info("Processing Sketchy | png_dimensions: %s | padding: %s | epsilon %s | max_seq_len %s | shard_size %s | only_valid %s", png_dims, padding,
epsilon, max_seq_len, shard_size, only_valid)
# We sometimes desire a set of sketchy training examples that are class exclusive of anything in our downstream few-shot tasks.
# As our images are sourced from imagenet, this is an issue with datsets such as miniimagenet and tieredimagenet.
# While we are not currently testing on tieredimagenet, we should dedupe classes from miniimagenet.
# To do this, we identify all hyponyms of minimagenet test classes and remove any sketchy examples that are of imagenet IDs that
# belong in the hyponym sets of all minimagenet test classes.
# NOTE: This is deduped on a per-image basis, not a per-class in sketchy basis. Many sketchy classes contain multiple IDs,
# Some of which may be excluded, some of which may not.
exclusive_set = set()
for parent in exclusive_set_parents:
res = requests.get("http://www.image-net.org/api/text/wordnet.structure.hyponym?wnid={}&full=1".format(parent))
hyponyms = res.content.decode("utf-8").replace("\r\n", "").split("-")
exclusive_set.update(hyponyms)
imagenet_to_bbox = {}
with open(os.path.join(raw_info_dir, "stats.csv"), 'r', newline='\n') as statscsv:
reader = csv.reader(statscsv, delimiter=',')
next(reader)
for row in reader:
imagenet_id_plus_count, bbox, width_height = row[2], row[14:18], row[12:14]
if imagenet_id_plus_count not in imagenet_to_bbox:
# Note: BBOX is in BBox_x, BBox_y, BBox_width, BBox_height
imagenet_to_bbox[imagenet_id_plus_count] = [int(x) for x in bbox], [int(x) for x in width_height]
sketchy_imagenet_hyponyms = {}
skipped = {} # Keep track of skipped sets for logging purposes
class_list = [x.replace(" ", "_") for x in class_list]
all_examples = np.empty((0, 6))
for class_name in class_list:
logging.info("Loading Class: %s", class_name)
photo_folder = os.path.join(raw_photo_dir, class_name)
sketch_folder = os.path.join(raw_sketch_dir, class_name)
svg_folder = os.path.join(raw_svg_dir, class_name)
valid_sketches_for_class = open(os.path.join(svg_folder, "checked.txt")).read().splitlines()
invalid_sketches_for_class = set(open(os.path.join(svg_folder, "invalid.txt")).read().splitlines())
for photo_file in os.listdir(photo_folder):
imagenet_id_plus_count = photo_file.split(".")[0]
# Determine if the set of hyponyms of the miniimagenet dataset intersect with any hyponyms of this imagenet id.
# If so, there is class overlap and as such we will pass the image.
if exclusive_set:
imagenet_id = imagenet_id_plus_count.split("_")[0]
if imagenet_id not in sketchy_imagenet_hyponyms:
res = requests.get("http://www.image-net.org/api/text/wordnet.structure.hyponym?wnid={}&full=1".format(imagenet_id))
hyponyms = res.content.decode("utf-8").replace("\r\n", "").split("-")
sketchy_imagenet_hyponyms[imagenet_id] = set(hyponyms)
if not exclusive_set.isdisjoint(sketchy_imagenet_hyponyms[imagenet_id]):
if imagenet_id not in skipped:
skipped[imagenet_id] = 1
logging.info("Skipping ImageNet ID: %s | from sketchy class %s", imagenet_id, class_name)
else:
skipped[imagenet_id] += 1
continue
sketches_for_photo = list(filter(lambda x: photo_file[:-4] in x, valid_sketches_for_class))
valid_sketches_for_photo = list(filter(lambda sketch: sketch not in invalid_sketches_for_class, sketches_for_photo))
natural_image = Image.open(os.path.join(photo_folder, photo_file))
for valid_sketch in valid_sketches_for_photo:
valid_sketch_path = os.path.join(svg_folder, valid_sketch+".svg")
try:
svg = svg2paths(valid_sketch_path)[0]
except:
with open(valid_sketch_path, "r") as errorsvg:
val = errorsvg.read()
if "</svg>" not in val[-10:]:
with open(valid_sketch_path, "a") as errorsvg:
errorsvg.write("</svg>\n")
logging.info("fixed %s", valid_sketch_path)
else:
logging.info("still_broken %s", valid_sketch_path)
if only_valid:
continue
else:
svg = None
sketch_path = os.path.join(sketch_folder, valid_sketch + ".png")
x = np.array([[natural_image, sketch_path, svg, valid_sketch] + list(imagenet_to_bbox[imagenet_id_plus_count])])
all_examples = np.concatenate((all_examples, x))
np.random.shuffle(all_examples)
logging.info("Total Skipped: | %s", str(skipped))
logging.info("Beginning Processing | %s sketches | %s classes ",
all_examples.shape[0], len(class_list))
cpu_count = psutil.cpu_count(logical=False)
workers_per_cpu = 0.5
# First, parallelize our computation of stroke-three formats from our svg files
# with ProcessPoolExecutor(max_workers=int(cpu_count * workers_per_cpu)) as executor:
with ProcessPoolExecutor(max_workers=int(cpu_count * workers_per_cpu)) as executor:
out_iter = executor.map(svg_to_stroke_three,
(all_examples[i: i + shard_size, 2:3] for i in range(0, all_examples.shape[0], shard_size)),
repeat(epsilon),
repeat(flip_x),
repeat(flip_y))
try:
count = 0
for idx, data in enumerate(out_iter):
all_examples[idx * shard_size: (idx + 1) * shard_size, 2:3] = data
count += data.shape[0]
logging.info("Converted to stroke-three: %d/%d",
count,
all_examples.shape[0])
except Exception as e:
logging.info("SVGs Converted to stroke-three complete")
# Clean up garbage to save RAM
gc.collect()
normalizing_scale_factor = get_normalizing_scale_factor(all_examples[:, 2:3])
with ProcessPoolExecutor(max_workers=int(cpu_count * workers_per_cpu)) as executor:
out = executor.map(sketch_process,
(all_examples[i: i + shard_size] for i in range(0, all_examples.shape[0], shard_size)),
repeat(padding),
repeat(max_seq_len),
repeat(png_dims),
repeat(normalizing_scale_factor),
(os.path.join(save_dir, "{}.npz".format(i)) for i in range(ceil(all_examples.shape[0] // shard_size) + 1)),
(os.path.join(sample_dir, "{}".format(i)) for i in range(ceil(all_examples.shape[0] // shard_size) + 1)),
chunksize=1)
total_count = 0
last_time = time()
try:
for write_signal in out:
total_count += write_signal
curr_time = time()
logging.info("Processed Total: {:8}/{:8} | Time/Batch: {:8.2f} | Time/Image: {:8.8f}"
.format(total_count,
all_examples.shape[0],
(curr_time - last_time) / cpu_count,
(curr_time - last_time) / (cpu_count * shard_size)))
last_time = curr_time
except Exception as e:
logging.info("Processing Done")
raise e
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,391
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/datasets/fs_omniglot_vinyals.py
|
import os
import numpy as np
from PIL import Image
from absl import logging
from datasets.base import register_dataset, DatasetEpisodic
from util import string_to_strokes, apply_rdp, strokes_to_stroke_three, stroke_five_format, get_normalizing_scale_factor, \
scale_and_center_stroke_three, rasterize, stroke_three_format_centered, rotate_4
@register_dataset("fs_omniglot_vinyals")
class FSVinyalsOmniglotDataset(DatasetEpisodic):
def __init__(self, data_dir, params):
"""
Modification of the FS_Omniglot dataset, each example is augmented with 90, 180 and 270 degree rotations as unique classes in the
dataset.
:param data_dir:
:param params:
"""
super(FSVinyalsOmniglotDataset, self).__init__(data_dir, params)
self._mode = params.mode
self.way = params.way
self.shot = params.shot
if "augmentations" in params:
self._augmentations = params.augmentations
self._dataset_path = os.path.join(self._data_dir, "fs_omniglot_vinyals")
def load(self, repeat=True):
data_path = os.path.join(self._dataset_path, 'caches')
if not self._split:
self._split = self._split = ','.join(sorted(os.listdir(data_path)))
return self._load_episodic_or_batch(data_path, repeat)
def _filter_collections(self, files):
"""
:param files: class, stroke, raster
:return: y_strokes(ground_truth), y_strokes(teacher) x_image, class_names
"""
files = sorted(files)
return files[2], files[2], files[1], files[0]
def prepare(self, FLAGS, epsilon=2., png_dims=(28, 28), padding=None, max_seq_len=120):
"""
Processing function that converts the default data for ingestion by our model.
Includes augmentations rotations of each example.
:param FLAGS:
:param epsilon:
:param png_dims:
:param padding:
:param max_seq_len:
:return:
"""
padding = padding if padding else round(min(png_dims) / 10.0) * 2
save_dir = os.path.join(self._dataset_path, "caches")
sample_dir = os.path.join(self._dataset_path, "processing-samples")
raw_dir = os.path.join(self._dataset_path, "raw")
images_background = os.path.join(raw_dir, "images_background")
strokes_evaluation = os.path.join(raw_dir, "strokes_evaluation")
images_evaluation = os.path.join(raw_dir, "images_evaluation")
strokes_background = os.path.join(raw_dir, "strokes_background")
logging.info("Processing FSO | png_dimensions: %s | padding: %s", png_dims, padding)
to_save = []
all_sketches = []
for image_dir, stroke_dir in [(images_background, strokes_background), (images_evaluation, strokes_evaluation)]:
alphabet_list = os.listdir(image_dir)
for alphabet in alphabet_list:
character_list = os.listdir(os.path.join(image_dir, alphabet))
for character in character_list:
logging.info("Processing | Alphabet: %s | Character: %s", alphabet, character)
image_files = sorted(os.listdir(os.path.join(image_dir, alphabet, character)))
stroke_files = sorted(os.listdir(os.path.join(stroke_dir, alphabet, character)))
accumulates = [{"strokes": [], "rasterized_strokes": [], "class": []} for _ in range(4)]
for image_file, stroke_file in zip(image_files, stroke_files):
image: Image.Image = Image.open(os.path.join(image_dir, alphabet, character, image_file)).convert("RGB")
strokes_str = open(os.path.join(stroke_dir, alphabet, character, stroke_file)).read()
strokes = string_to_strokes(strokes_str)
strokes = apply_rdp(strokes, epsilon=epsilon)
stroke_three = strokes_to_stroke_three(strokes)
all_sketches.append([stroke_three[1:]])
# Centered and normalized strokes for training sequence
stroke_three_centered_and_scaled = scale_and_center_stroke_three(np.copy(stroke_three), png_dims, padding)
try:
stroke_five = stroke_five_format(stroke_three, max_seq_len)
except:
logging.info("Stroke limit exceeds %s for example: %s",
max_seq_len, os.path.join(stroke_dir, alphabet, character, stroke_file))
rasterized_strokes = rasterize(stroke_three_centered_and_scaled, png_dims)
class_name = alphabet + character[-2:]
aug_res = rotate_4(stroke_five, stroke_five, rasterized_strokes, class_name)
for idx, entry in enumerate(aug_res):
accumulates[idx]["class"].append(entry[3])
accumulates[idx]["rasterized_strokes"].append(np.array(entry[2], dtype=np.float32))
accumulates[idx]["strokes"].append(entry[0].astype(np.float32))
# Save Archive dir
for idx, entry in enumerate(accumulates):
if idx == 0:
fname = character + ".npz"
elif idx == 1:
fname = character + "-rot90.npz"
elif idx == 2:
fname = character + "-rot180.npz"
elif idx == 3:
fname = character + "-rot270.npz"
char_save_dir, char_save_path = os.path.join(save_dir, alphabet), os.path.join(save_dir, alphabet, fname)
os.makedirs(char_save_dir, exist_ok=True)
# Save results so we can compute and apply scaling factors.
to_save.append((char_save_path, entry, alphabet, fname))
scale_factor = get_normalizing_scale_factor(all_sketches)
for char_save_path, accumulate, alphabet, character in to_save:
# Apply scaling factor
for idx in range(len(accumulate['strokes'])):
accumulate['strokes'][idx][:, 0:2] /= scale_factor
# Sample random example
rand_idx = np.random.randint(0, len(accumulate["strokes"]) - 1)
im_raster = Image.fromarray(accumulate['rasterized_strokes'][rand_idx].astype('uint8'))
stroke_three_string = "\n".join([str(x) for x in stroke_three_format_centered(accumulate['strokes'][rand_idx])])
save_path = os.path.join(sample_dir, alphabet, character)
os.makedirs(save_path, exist_ok=True)
im_raster.save(os.path.join(save_path, str(rand_idx) + "_raster.png"))
with open(os.path.join(save_path, str(rand_idx) + "_strokes.txt"), 'w') as f:
f.write(stroke_three_string)
np.savez(char_save_path, **accumulate)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,392
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/configs/classifier_configs.py
|
from configs.base import register_config
from util import HParams, ST2_classes, T1_classes, T2_classes
@register_config("classifier")
def classifier_default():
return HParams(
# ----- Model Parameters ----- #
class_list=None,
png_dims=32,
weights=None,
# ----- Training Parameters ----- #
lr=0.01,
lr_schedule={40000: 0.005, 80000: 0.001, 100000: 0.0001}
)
@register_config("classifier/T1")
def classifier_T1(hparam: HParams):
hparam.set_hparam("class_list", ",".join(T1_classes))
return hparam
@register_config("classifier/T2")
def classifier_T2(hparam: HParams):
hparam.set_hparam("class_list", ",".join(T2_classes))
return hparam
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,393
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/util/fs_omniglot_utils.py
|
import numpy as np
from rdp import rdp
def string_to_strokes(stroke_str, flip=True):
    """
    Convert Omniglot string-format stroke data into a list of absolute-coordinate strokes.

    Strokes are delimited by "START"/"BREAK" marker lines; every other line is a
    comma-separated record whose first two fields are the x and y coordinates.

    :param stroke_str: raw text of an Omniglot stroke file
    :param flip: negate the y axis (use for Omniglot, which stores y downward)
    :return: list of (n_points, 2) float arrays, one per stroke
    """
    lines = [line.strip() for line in stroke_str.split('\n')]
    strokes = []
    # Fix: initialize up front so a file not starting with "START" cannot NameError.
    curr_stroke = []
    for line in lines:
        if not line:
            continue  # tolerate blank/trailing lines
        if line == "START":
            curr_stroke = []
        elif line == "BREAK":
            if not curr_stroke:
                continue  # ignore empty strokes (e.g. consecutive BREAKs)
            stroke = np.array(curr_stroke)
            if flip:
                stroke[:, 1] = -stroke[:, 1]
            strokes.append(stroke)
            curr_stroke = []
        else:
            # np.fromstring(text, sep=...) is deprecated; parse the record directly.
            fields = line.split(',')
            curr_stroke.append(np.array([float(fields[0]), float(fields[1])]))
    return strokes
def apply_rdp(strokes, epsilon=1.5):
    """
    Simplify every stroke with the Ramer-Douglas-Peucker algorithm.

    :param strokes: list of absolute-coordinate point arrays
    :param epsilon: RDP tolerance; larger values remove more points
    :return: list of simplified strokes, in the same order
    """
    simplified = []
    for stroke in strokes:
        simplified.append(rdp(stroke, epsilon=epsilon))
    return simplified
def strokes_to_stroke_three(strokes):
    """
    Convert absolute-coordinate strokes to offset ("stroke-3") format for the drawer.

    Each output row is (dx, dy, pen_up): the offset from the previous point plus a
    flag that is 1 on the final point of a multi-point stroke (pen lifts afterwards).

    :param strokes: list of (n_points, 2) arrays of absolute positions
    :return: (n_total, 3) array in offset stroke-3 format
    """
    rows = []
    prev = np.array([0, 0])
    for stroke in strokes:
        n = len(stroke)
        for i, point in enumerate(stroke):
            # Pen lifts after the last point of a multi-point stroke; a lone
            # point keeps the pen down here and is augmented into a dot below.
            pen_up = 1 if (i == n - 1 and n > 1) else 0
            rows.append(np.concatenate((point - prev, [pen_up])))
            prev = point
        # Augment a single-point stroke with a tiny visible segment (a "dot").
        if n == 1:
            rows.append(np.array([0.5, 0.5, 0]))
            rows.append(np.array([-0.5, -0.5, 1]))
    return np.array(rows)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,394
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/subs/cells.py
|
import tensorflow as tf
class LayerNormLSTMCell(tf.keras.layers.AbstractRNNCell):
    """Layer-Norm LSTM, with Ortho Init. and Recurrent Dropout without Memory Loss.
    https://arxiv.org/abs/1607.06450 - Layer Norm
    https://arxiv.org/abs/1603.05118 - Recurrent Dropout without Memory Loss
    """

    def __init__(self,
                 num_units,
                 forget_bias=1.0,
                 use_recurrent_dropout=False,
                 dropout_keep_prob=0.90,
                 **kwargs):
        """Initialize the Layer Norm LSTM cell.
        Args:
          num_units: int, The number of units in the LSTM cell.
          forget_bias: float, The bias added to forget gates (default 1.0).
          use_recurrent_dropout: Whether to use Recurrent Dropout (default False)
          dropout_keep_prob: float, dropout keep probability (default 0.90)
        """
        super(LayerNormLSTMCell, self).__init__(**kwargs)
        self._num_units = num_units
        self._forget_bias = forget_bias
        self._use_recurrent_dropout = use_recurrent_dropout
        self._dropout_keep_prob = dropout_keep_prob
        ortho_initializer = tf.initializers.Orthogonal()
        glorot_initializer = tf.initializers.GlorotNormal()
        # Original implementation split this layer into w_xh and w_hh, however we anticipate that using ortho only may be better.
        self._input_w_xh = tf.keras.layers.Dense(units=4 * self._num_units, use_bias=False, kernel_initializer=glorot_initializer)
        self._hidden_w_hh = tf.keras.layers.Dense(units=4 * self._num_units, use_bias=False, kernel_initializer=ortho_initializer)
        # NOTE(review): one LayerNormalization instance is shared by all four gates
        # and the cell state, so they share a single gamma/beta — per-gate norms
        # are the more common formulation; confirm this sharing is intended.
        self._layer_norm = tf.keras.layers.LayerNormalization(axis=1)

    @property
    def output_size(self):
        return self._num_units

    @property
    def state_size(self):
        # State is the (hidden, cell) pair concatenated along the feature axis.
        return self._num_units * 2

    def call(self, x, state, timestep=0, scope=None):
        """Run one LSTM step.

        Args:
          x: input at this timestep.
          state: concatenated (hidden, cell) state of width 2 * num_units.
        Returns:
          (output, next_state): output is the new hidden state; next_state is the
          new (hidden, cell) concatenation.
        """
        hidden, cell_state = tf.split(state, 2, 1)
        gates = tf.split(self._input_w_xh(x) + self._hidden_w_hh(hidden), 4, 1)
        i, j, f, o = [self._layer_norm(gate) for gate in gates]
        if self._use_recurrent_dropout:
            # Fix: previously constructed tf.keras.layers.Dropout(tanh(j), keep_prob)
            # — a Dropout *layer* with a tensor rate that was never applied. Apply
            # dropout to the candidate activation instead.
            g = tf.nn.dropout(tf.tanh(j), rate=1.0 - self._dropout_keep_prob)
        else:
            g = tf.tanh(j)
        new_cell_state = cell_state * tf.sigmoid(f + self._forget_bias) + tf.sigmoid(i) * g
        # Fix: the output gate multiplies (not adds) the squashed cell state.
        new_hidden_state = tf.tanh(self._layer_norm(new_cell_state)) * tf.sigmoid(o)
        next_state = tf.concat((new_hidden_state, new_cell_state), 1)
        # Fix: the cell output is the hidden state (was returning the cell state).
        return new_hidden_state, next_state
class HyperLSTMCell(tf.keras.layers.AbstractRNNCell):
    """HyperLSTM with Ortho Init, Layer Norm, Recurrent Dropout, no Memory Loss.
    https://arxiv.org/abs/1609.09106
    http://blog.otoro.net/2016/09/28/hyper-networks/
    """

    def __init__(self,
                 num_units,
                 forget_bias=1.0,
                 use_recurrent_dropout=False,
                 recurrent_dropout_prob=0.90,
                 use_layer_norm=True,
                 hyper_forget_bias=1.0,
                 hyper_num_units=256,
                 hyper_embedding_size=32,
                 hyper_use_recurrent_dropout=False,
                 **kwargs):
        """Initialize the Layer Norm HyperLSTM cell.

        Args:
            num_units: int, The number of units in the LSTM cell.
            forget_bias: float, The bias added to forget gates (default 1.0).
            use_recurrent_dropout: Whether to use Recurrent Dropout (default False)
            recurrent_dropout_prob: float, dropout keep probability (default 0.90)
            use_layer_norm: boolean. (default True)
                Controls whether we use LayerNorm layers in main LSTM & HyperLSTM cell.
            hyper_forget_bias: float, forget-gate bias of the hyper cell (default 1.0).
            hyper_num_units: int, number of units in HyperLSTM cell.
                (default is 256; recommend experimenting for larger tasks)
            hyper_embedding_size: int, size of signals emitted from HyperLSTM cell.
                (default is 32; recommend trying larger values for large datasets)
            hyper_use_recurrent_dropout: boolean. (default False)
                Controls whether HyperLSTM cell also uses recurrent dropout.
                Recommend turning this on only if hyper_num_units becomes large (>= 512)
        """
        super(HyperLSTMCell, self).__init__(**kwargs)
        # ----- Parameters ----- #
        self._num_units = num_units
        self._forget_bias = forget_bias
        self._use_recurrent_dropout = use_recurrent_dropout
        self._dropout_keep_prob = recurrent_dropout_prob
        # NOTE(review): use_layer_norm is stored but layer norm is applied
        # unconditionally in call() -- confirm whether it should gate the LN layers.
        self._use_layer_norm = use_layer_norm
        self._hyper_forget_bias = hyper_forget_bias
        self._hyper_num_units = hyper_num_units
        self._hyper_embedding_size = hyper_embedding_size
        self._hyper_use_recurrent_dropout = hyper_use_recurrent_dropout
        self._total_num_units = self._num_units + self._hyper_num_units
        # ----- Build Model Components ----- #
        self._hyper_cell = LayerNormLSTMCell(num_units=self._hyper_num_units,
                                             forget_bias=self._hyper_forget_bias,
                                             use_recurrent_dropout=self._hyper_use_recurrent_dropout,
                                             dropout_keep_prob=self._dropout_keep_prob)
        ortho_initializer = tf.initializers.Orthogonal()
        glorot_initializer = tf.initializers.GlorotNormal()
        # Original implementation split this layer into w_xh and w_hh, however we
        # anticipate that using ortho only may be better.
        self._input_wxh = tf.keras.layers.Dense(units=4 * self._num_units, use_bias=False, kernel_initializer=glorot_initializer)
        self._hidden_whh = tf.keras.layers.Dense(4 * self._num_units, use_bias=False, kernel_initializer=ortho_initializer)
        self._bias = tf.Variable(tf.zeros(4 * self._num_units))
        # recurrent batch norm init trick (https://arxiv.org/abs/1603.09025).
        # Input-side HyperNorms carry no bias; hidden-side HyperNorms do.
        self._hnorm_input_ix = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=False)
        self._hnorm_input_jx = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=False)
        self._hnorm_input_fx = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=False)
        self._hnorm_input_ox = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=False)
        self._hnorm_hidden_ih = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=True)
        self._hnorm_hidden_jh = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=True)
        self._hnorm_hidden_fh = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=True)
        self._hnorm_hidden_oh = HyperNorm(num_units=self._num_units, embedding_size=self._hyper_embedding_size, use_bias=True)
        self._layer_norm = tf.keras.layers.LayerNormalization()
        self._layer_norm2 = tf.keras.layers.LayerNormalization()

    @property
    def output_size(self):
        # Per-step output is the main LSTM's hidden state only.
        return self._num_units

    @property
    def state_size(self):
        # Hidden + cell state for both the main LSTM and the hyper LSTM.
        return 2 * self._total_num_units

    def call(self, input, state):
        """One step: run the hyper cell, modulate the main cell's gates with its
        output, and update both the main and hyper states.

        Args:
            input: input tensor of shape (batch, input_dim).
            state: concatenation of (main hidden, hyper hidden, main cell,
                hyper cell), shape (batch, 2 * total_num_units).

        Returns:
            (output, next_state): the main cell's new hidden state, and the
            re-packed joint state.
        """
        joint_h, joint_c = tf.split(state, 2, 1)
        hidden = joint_h[:, 0:self._num_units]
        cell = joint_c[:, 0:self._num_units]
        hyper_state = tf.concat((joint_h[:, self._num_units:], joint_c[:, self._num_units:]),
                                axis=1)
        # The hyper cell sees the input together with the main hidden state.
        hyper_input = tf.concat((input, hidden), 1)
        hyper_output, hyper_new_state = self._hyper_cell(hyper_input, hyper_state)
        x_w = self._input_wxh(input)
        hidden_w = self._hidden_whh(hidden)
        # BUG FIX: the input and hidden projections were swapped here -- the
        # hidden projection was fed to the input-side HyperNorms and vice
        # versa. Route x_w through the (bias-free) input HyperNorms and
        # hidden_w through the (biased) hidden HyperNorms, as the layer
        # names intend.
        ix, jx, fx, ox = tf.split(x_w, 4, 1)
        ix = self._hnorm_input_ix(hyper_output, ix)
        jx = self._hnorm_input_jx(hyper_output, jx)
        fx = self._hnorm_input_fx(hyper_output, fx)
        ox = self._hnorm_input_ox(hyper_output, ox)
        ih, jh, fh, oh = tf.split(hidden_w, 4, 1)
        ih = self._hnorm_hidden_ih(hyper_output, ih)
        jh = self._hnorm_hidden_jh(hyper_output, jh)
        fh = self._hnorm_hidden_fh(hyper_output, fh)
        oh = self._hnorm_hidden_oh(hyper_output, oh)
        ib, jb, fb, ob = tf.split(self._bias, 4, 0)
        i = ix + ih + ib
        j = jx + jh + jb
        f = fx + fh + fb
        o = ox + oh + ob
        concat = tf.concat((i, j, f, o), 1)
        i, j, f, o = tf.split(self._layer_norm(concat), 4, 1)
        if self._use_recurrent_dropout:
            # BUG FIX: tf.nn.dropout's second argument is the drop *rate* in
            # TF2; convert from the keep probability.
            g = tf.nn.dropout(tf.tanh(j), rate=1.0 - self._dropout_keep_prob)
        else:
            g = tf.tanh(j)
        new_cell_state = cell * tf.sigmoid(f + self._forget_bias) + tf.sigmoid(i) * g
        new_hidden_state = tf.tanh(self._layer_norm2(new_cell_state)) * tf.sigmoid(o)
        # Re-pack main and hyper states into the joint layout.
        new_hyper_hidden, new_hyper_cell = tf.split(hyper_new_state, 2, 1)
        new_hidden = tf.concat((new_hidden_state, new_hyper_hidden), 1)
        new_cell = tf.concat((new_cell_state, new_hyper_cell), 1)
        new_total_state = tf.concat((new_hidden, new_cell), 1)
        return new_hidden_state, new_total_state
class HyperNorm(tf.keras.layers.Layer):
    """Hypernetwork gate modulation.

    Scales a gate pre-activation by a signal derived from the hyper-LSTM
    output, and optionally adds a similarly-derived shift.
    """

    def __init__(self, num_units, embedding_size, use_bias, **kwargs):
        super(HyperNorm, self).__init__(**kwargs)
        self._num_units = num_units
        self._embedding_size = embedding_size
        self._use_bias = use_bias
        # ----- Build Model ----- #
        # recurrent batch norm init trick (https://arxiv.org/abs/1603.09025).
        gamma = 0.10
        self._zw = tf.keras.layers.Dense(
            self._embedding_size, activation=None, use_bias=True,
            kernel_initializer=tf.initializers.Zeros(),
            bias_initializer=tf.initializers.Constant(value=1.0))
        self._alpha = tf.keras.layers.Dense(
            self._num_units, activation=None, use_bias=False,
            kernel_initializer=tf.initializers.Constant(value=gamma / self._embedding_size))
        if self._use_bias:
            self._zb = tf.keras.layers.Dense(
                self._embedding_size, activation=None, use_bias=False,
                kernel_initializer=tf.initializers.RandomNormal(stddev=0.01))
            # NOTE(review): the beta kernel is initialized to 1.0 here; the
            # reference hypernetwork implementations typically initialize the
            # beta projection to zero -- confirm this deviation is intentional.
            self._beta = tf.keras.layers.Dense(
                self._num_units, activation=None,
                kernel_initializer=tf.initializers.Constant(value=1.0))

    def call(self, hyper_output, layer):
        """Return `layer` scaled by alpha(zw(h)) and, if enabled, shifted by beta(zb(h))."""
        scaled = self._alpha(self._zw(hyper_output)) * layer
        if self._use_bias:
            scaled = scaled + self._beta(self._zb(hyper_output))
        return scaled
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,395
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/base/__init__.py
|
from .model_base import *
from .models import *
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,396
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/base/models.py
|
MODELS = {}


def register_model(model_name):
    """
    Decorator for registering a model class.
    :param model_name: key under which the decorated class is stored.
    :return: decorator that records the class in MODELS and returns it unchanged.
    """
    def decorator(cls):
        MODELS[model_name] = cls
        return cls
    return decorator


def get_model(model):
    """
    Returns model class if registered.
    :param model: registry key.
    :return: the registered model class.
    :raises ValueError: if `model` was never registered.
    """
    if model not in MODELS:
        raise ValueError("Model not found: {}".format(model))
    return MODELS[model]
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,397
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/util/logging.py
|
import logging
def log_flags(flags):
    """Log the user-defined absl flags, one per line, framed by separator bars."""
    logging.info("---------------------------------------------FLAGS---------------------------------------------")
    flags_dict = flags.flag_values_dict()
    # NOTE(review): the [20:-5] slice assumes a fixed count of absl builtin
    # flags before/after the user-defined ones -- fragile across absl
    # versions; confirm against the absl release in use.
    for name in list(flags_dict)[20:-5]:
        logging.info("[%s]: %s", name, flags_dict[name])
    logging.info("-----------------------------------------------------------------------------------------------")
def log_hparams(*args):
    """Log each truthy hparams object's key/value pairs between separator bars."""
    logging.info("-----------------------------------------------------------------------------------------------")
    for hparams in args:
        if not hparams:
            # Skip None / empty placeholders.
            continue
        for key, val in hparams.values().items():
            logging.info("[%s]: %s", key, val)
    logging.info("-----------------------------------------------------------------------------------------------")
def bar():
    """Log a single horizontal separator line."""
    logging.info("-----------------------------------------------------------------------------------------------")
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,398
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/drawer.py
|
import os
import tensorflow as tf
import numpy as np
from PIL import Image
from absl import logging
from time import time
from models.base import TrainableModel, register_model
from models.subs.decoders import DecoderDefault
from models.subs.encoders import EncoderConv
from util import compute_pen_state_loss, compute_mdn_loss, process_write_out, compute_pixel_loss, strokes_to_image, \
bilinear_interpolate_4_vectors, stroke_three_format, scale_and_rasterize
from util.write_routines import parallel_writer_sketches
try:
import horovod.tensorflow as hvd
except:
hvd = None
@register_model('drawer')
class DrawerModel(TrainableModel):
def __init__(self, base_dir, model_id, params, training=True, ckpt=None):
    """
    SketchEmbedding drawing model.
    :param base_dir: root directory for this run's artifacts (checkpoints, summaries, samples).
    :param model_id: identifier used to namespace the run.
    :param params: HParams-style object whose fields are read below.
    :param training: whether the model is built for training.
    :param ckpt: optional checkpoint step number to restore from.
    """
    # ----- Model Parameters ----- #
    self._z_size = params.z_size
    self._num_mixture = params.num_mixture
    self._dec_rnn_size = params.rnn_output_size
    self._rnn_model = params.rnn_cell
    self._cell_configs = params.cell_configs
    self._kl_tolerance = params.kl_tolerance
    self._kl_weight = params.kl_weight
    # Pixel-loss weight is annealed from min toward max in `interval`
    # increments every `pixel_loss_step` optimizer steps (see compute_loss).
    self._pixel_loss_weight_max = params.pixel_loss_weight_max
    self._pixel_loss_weight_min = params.pixel_loss_weight_min
    self._pixel_loss_weight_interval = params.pixel_loss_weight_interval
    self._pixel_loss_step = params.pixel_loss_step
    self._pixel_loss_weight = tf.Variable(self._pixel_loss_weight_min, trainable=False, dtype=tf.float32)
    # Sigma decays geometrically every `sigma_decay_freq` steps once past
    # `sigma_decay_start`; the schedule is applied in compute_loss.
    self._sigma_decay_init = params.sigma_init
    self._sigma_decay_start = params.sigma_decay_start
    self._sigma_decay_freq = params.sigma_decay_freq
    self._sigma_decay_rate = params.sigma_decay_rate
    self._sigma = tf.Variable(self._sigma_decay_init, trainable=False, dtype=tf.float32)
    # ----- Training Parameters ----- #
    self._lr = params.lr
    self._lr_decay_freq = params.lr_decay_freq
    self._lr_decay_rate = params.lr_decay_rate
    self._gradient_cap = params.gradient_cap
    # ----- Other Parameters ----- #
    self._distributed = params.distributed
    if self._distributed:
        # Modify scheduling if distributing: each of hvd.size() workers takes
        # an equal share of the steps, so per-worker frequencies shrink.
        self._pixel_loss_step = self._pixel_loss_step // hvd.size()
        self._sigma_decay_freq = self._sigma_decay_freq // hvd.size()
        self._lr_decay_freq = self._lr_decay_freq // hvd.size()
    # ----- Init Model ----- #
    super(DrawerModel, self).__init__(base_dir, model_id, training, ckpt=ckpt)
def _build_model(self):
    """Instantiate the decoder, encoder, and Adam optimizer with LR decay."""
    self._decoder = DecoderDefault(self._dec_rnn_size, self._num_mixture, self._rnn_model, self._cell_configs)
    # Encoder output is sized to seed the decoder cell's state.
    self._encoder = EncoderConv(self._z_size, self._decoder.cell.state_size)
    # Scale the base learning rate by worker count when distributed.
    if self._distributed:
        base_lr = self._lr * hvd.size()
    else:
        base_lr = self._lr
    schedule = tf.keras.optimizers.schedules.ExponentialDecay(base_lr, self._lr_decay_freq, self._lr_decay_rate)
    self._optimizer = tf.optimizers.Adam(learning_rate=schedule, clipvalue=self._gradient_cap)
def _checkpoint_model(self):
    """Create the checkpoint manager and restore weights if a checkpoint exists.

    If `self._ckpt` is set, restores exactly the checkpoint whose path ends
    with that step number (fatal unless exactly one matches); otherwise
    restores the latest checkpoint when one is present.
    """
    ckpt = tf.train.Checkpoint(optimizer=self._optimizer,
                               encoder=self._encoder,
                               decoder=self._decoder)
    self._ckpt_manager = tf.train.CheckpointManager(ckpt, self._checkpoint_dir, max_to_keep=None)
    if self._ckpt:
        filtered_ckpts = list(filter(lambda x: x.endswith("-" + str(self._ckpt)), self._ckpt_manager.checkpoints))
        if len(filtered_ckpts) == 1:
            logging.info("Checkpoint %s found. Restoring.", str(filtered_ckpts[0]))
            self._restore_checkpoint(ckpt, filtered_ckpts[0])
        else:
            logging.fatal("%s matching checkpoints found. Exiting.", len(filtered_ckpts))
    elif self._ckpt_manager.latest_checkpoint:
        logging.info("Restoring Checkpoint: %s", self._ckpt_manager.latest_checkpoint)
        self._restore_checkpoint(ckpt, self._ckpt_manager.latest_checkpoint)

def _restore_checkpoint(self, ckpt, path):
    """Restore `ckpt` from `path`; strict object matching when training."""
    status = ckpt.restore(path)
    if self.training:
        # Training must restore every tracked object; a partial match
        # indicates a model/checkpoint mismatch.
        status.assert_existing_objects_matched()
    else:
        # Inference may legitimately skip optimizer slots etc.
        status.expect_partial()
def train(self, train_dataset, train_steps, print_freq, save_freq, eval_dataset=None, eval_freq=None):
    """Main training loop.

    :param train_dataset: (tf.data dataset, saveable dataset) tuple; only the
        first element is consumed.
    :param train_steps: total number of optimizer steps to run.
    :param print_freq: steps between console log lines / summary writes.
    :param save_freq: steps between checkpoint saves.
    :param eval_dataset: optional eval dataset tuple, evaluated every
        `eval_freq` steps.
    :param eval_freq: steps between evaluations; defaults to `save_freq`
        when an eval dataset is given without a frequency.
    """
    if self._distributed:
        # Each of hvd.size() workers takes an equal share of the steps, so
        # all per-worker frequencies shrink by the same factor.
        train_steps = train_steps // hvd.size()
        print_freq = print_freq // hvd.size()
        save_freq = save_freq // hvd.size()
        if eval_freq:
            eval_freq = eval_freq // hvd.size()
    if eval_dataset and not eval_freq:
        eval_freq = save_freq
    train_dataset, _ = train_dataset  # The second element is a saveable file-based dataset, currently not used
    train_iter = train_dataset.__iter__()
    last_time = time()
    start_time = last_time
    first_step = True
    # Resume from the optimizer's step count so restarts continue schedules.
    for step in tf.range(self._optimizer.iterations + 1, tf.constant(train_steps + 1)):
        y_sketch_gt, y_sketch_teacher, x_image = next(train_iter)[0:3]
        total_loss, pen_loss, offset_loss, pixel_loss, kl_loss = self.train_step(y_sketch_gt, y_sketch_teacher, x_image, first_step)
        # BUG FIX: first_step was never cleared, so in distributed mode the
        # one-time variable broadcast in train_step ran on *every* step.
        first_step = False
        try:
            tf.debugging.check_numerics(total_loss, "NaN Loss found. Reverting to previous checkpoint.")
        except:  # deliberately broad: revert to last checkpoint on any failure, then re-raise
            self._ckpt = None
            self._checkpoint_model()  # Reload most recent checkpoint
            raise
        # Only rank 0 logs, writes summaries, saves checkpoints, and evaluates.
        if (not self._distributed) or (hvd.rank() == 0):
            if step and step % print_freq == 0:
                curr_time = time()
                logging.info("Step: %6d | Loss: %.5f | PenL: %.5f | OffsetL(%4.2f): %.5f | PixelL(%4.2f): %.4f | KLL: %.4f | LR: %.5f | time/step: %.4f | Total Time: %7d",
                             step * (1 if not self._distributed else hvd.size()), total_loss, pen_loss,
                             1 - self._pixel_loss_weight.numpy(), offset_loss,
                             self._pixel_loss_weight.numpy(), pixel_loss,
                             kl_loss, self._optimizer._decayed_lr('float32').numpy(),
                             (curr_time-last_time)/(print_freq * (1 if not self._distributed else hvd.size())), curr_time-start_time)
                last_time = curr_time
                with self._writer.as_default():
                    self._write_summaries(step, {"lr": self._optimizer._decayed_lr('float32'),
                                                 "train_loss": total_loss, "pen_loss": pen_loss,
                                                 "pixel_loss": pixel_loss, "offset_loss": offset_loss,
                                                 "pixel_loss_weight": self._pixel_loss_weight,
                                                 "weighted_pixel_loss": self._pixel_loss_weight.numpy() * pixel_loss,
                                                 "weighted_offset_loss": (1-self._pixel_loss_weight.numpy()) * offset_loss,
                                                 "sigma": self._sigma.numpy()})
            if step and step % save_freq == 0:
                self._ckpt_manager.save(step * (1 if not self._distributed else hvd.size()))
            if eval_dataset and step and step % eval_freq == 0:
                self.evaluate(step * (1 if not self._distributed else hvd.size()), eval_dataset)
def evaluate(self, step, eval_dataset):
    """Run one full pass over `eval_dataset`; log and summarize mean losses.

    :param step: global training step used to tag the summaries.
    :param eval_dataset: (tf.data dataset, saveable dataset) tuple; only the
        first element is consumed.
    """
    eval_dataset, _ = eval_dataset
    total_loss_mean, pen_loss_mean, offset_loss_mean, pixel_loss_mean, kl_loss_mean = (tf.keras.metrics.Mean(), tf.keras.metrics.Mean(),
                                                                                       tf.keras.metrics.Mean(), tf.keras.metrics.Mean(),
                                                                                       tf.keras.metrics.Mean())
    eval_start_time = time()
    for entry in eval_dataset.__iter__():
        y_sketch_gt, y_sketch_teacher, x_image = entry[0:3]
        # Drop the sampled strokes; only the distribution params are needed for loss.
        params = self.forward(y_sketch_teacher, x_image, training=False, generation_length=y_sketch_gt.shape[1]-1)[:-1]
        total_loss, pen_loss, offset_loss, pixel_loss, kl_loss = self.compute_loss(params, y_sketch_gt, x_image)
        total_loss_mean(total_loss)
        pen_loss_mean(pen_loss)
        offset_loss_mean(offset_loss)
        pixel_loss_mean(pixel_loss)
        kl_loss_mean(kl_loss)
    last_time = time()
    eval_total, eval_pen, eval_offset, eval_pixel, eval_kl = (total_loss_mean.result(), pen_loss_mean.result(),
                                                              offset_loss_mean.result(), pixel_loss_mean.result(),
                                                              kl_loss_mean.result())
    with self._writer.as_default():
        # BUG FIX: the weighted summaries previously used the *last batch's*
        # pixel_loss / offset_loss instead of the epoch means.
        self._write_summaries(step, {"eval_loss": eval_total, "eval_pen": eval_pen, "eval_offset": eval_offset,
                                     "eval_pixel": eval_pixel, "eval_kl": eval_kl, "eval_pixel_pen": eval_pixel + eval_pen,
                                     "eval_weighted_pixel": self._pixel_loss_weight.numpy() * eval_pixel,
                                     "eval_weighted_offset": (1-self._pixel_loss_weight.numpy()) * eval_offset,
                                     "eval_sigma": self._sigma.numpy()})
    logging.info(
        "Eval Done | Loss: %.5f | PenL: %.5f | OffsetL(%4.2f): %.5f | PixelL(%4.2f): %.4f | KLL: %.4f | Eval Time: %.4f",
        eval_total, eval_pen,
        1 - self._pixel_loss_weight.numpy(), eval_offset,
        self._pixel_loss_weight.numpy(), eval_pixel,
        eval_kl, last_time - eval_start_time)
def test(self, test_dataset, result_name, steps=None, generation_length=64, decodes=1):
    """Run inference over `test_dataset` and stream results to disk.

    :param test_dataset: (tf.data dataset, saveable dataset) tuple; only the
        first element is consumed.
    :param result_name: subdirectory (under the sampling dir) for outputs.
    :param steps: optional cap on the number of batches processed (None = all).
    :param generation_length: number of decode steps per generated sketch.
    :param decodes: samples drawn per image; when > 1, the image is embedded
        once and the decoder is sampled repeatedly.
    """
    logging.info("Beginning testing loop")
    sampling_dir = os.path.join(self._sampling_dir, result_name)
    test_dataset, _ = test_dataset
    # Begin Writing Child-Process: serialization happens in a separate
    # process so disk writes do not block inference.
    process, write_queue = process_write_out(parallel_writer_sketches, (sampling_dir,))
    try:
        for step, entry in enumerate(test_dataset):
            if step == steps:
                break
            if len(entry) == 2:
                # Image-only dataset: no ground-truth strokes available, so
                # substitute a minimal placeholder stroke-5 sketch.
                x_image, class_names = entry
                y_sketch_gt, y_sketch_teacher = (tf.tile(tf.constant([[[0., 0., 1., 0., 0.]]]), (x_image.shape[0], 2, 1)),
                                                 tf.tile(tf.constant([[[0., 0., 1., 0., 0.]]]), (x_image.shape[0], 2, 1)))
            else:
                y_sketch_gt, y_sketch_teacher, x_image, class_names = entry[0:4]
            if decodes == 1:
                # Single sample per image: one full forward pass per batch;
                # the last forward output is the sampled stroke sequence.
                output = self.forward(y_sketch_teacher, x_image, training=False, generation_length=generation_length)[-1]
                np_images, np_sketch, np_classes, np_prediction = (x_image.numpy(),
                                                                   y_sketch_gt.numpy(), class_names.numpy(), output.numpy())
                for idx in range(x_image.shape[0]):
                    write_queue.put({"rasterized_images": np_images[idx],
                                     "stroke_five_sketches": np_sketch[idx],
                                     "class_names": np_classes[idx],
                                     "stroke_predictions": np_prediction[idx]})
            else:
                # Multiple samples: embed once, then decode `decodes` times
                # from the same latent.
                z, _, _ = self.embed(x_image, training=False)
                for _ in range(decodes):
                    _, output = self.decode(z, training=False, generation_length=generation_length)
                    np_images, np_sketch, np_classes, np_prediction = (x_image.numpy(),
                                                                       y_sketch_gt.numpy(), class_names.numpy(), output.numpy())
                    for idx in range(0, x_image.shape[0]):
                        write_queue.put({"rasterized_images": np_images[idx],
                                         "stroke_five_sketches": np_sketch[idx],
                                         "class_names": np_classes[idx],
                                         "stroke_predictions": np_prediction[idx]})
        # Sentinel tells the writer process to drain its queue and exit.
        write_queue.put(None)
    except:
        # Deliberately broad: kill the writer child on any failure
        # (including KeyboardInterrupt), then re-raise.
        process.terminate()
        raise
    process.join()
    logging.info("Testing complete")
@tf.function
def train_step(self, y_sketch_gt, y_sketch_teacher, x_image, first_step):
    """Single optimization step.

    :param y_sketch_gt: ground-truth stroke-5 sketch batch.
    :param y_sketch_teacher: teacher-forcing input sketch batch.
    :param x_image: input raster image batch.
    :param first_step: expected True only for the first step, to trigger the
        one-time Horovod variable broadcast in distributed mode.
    :return: (total_loss, pen_loss, offset_loss, pixel_loss, kl_loss).
    """
    with tf.GradientTape() as tape:
        # Drop the sampled strokes; only the distribution params feed the loss.
        params = self.forward(y_sketch_teacher, x_image, training=True)[:-1]
        total_loss, pen_loss, offset_loss, pixel_loss, kl_loss = self.compute_loss(params, y_sketch_gt, x_image)
    if self._distributed:
        # Average gradients across all workers before applying.
        tape = hvd.DistributedGradientTape(tape)
    grads = tape.gradient(total_loss, self._encoder.trainable_variables + self._decoder.trainable_variables)
    self._optimizer.apply_gradients(zip(grads, self._encoder.trainable_variables + self._decoder.trainable_variables))
    if self._distributed and first_step:
        # Broadcast initial weights/optimizer state from rank 0 after the
        # first apply_gradients has created the optimizer slot variables.
        hvd.broadcast_variables(self._encoder.trainable_variables + self._decoder.trainable_variables, root_rank=0)
        hvd.broadcast_variables(self._optimizer.variables(), root_rank=0)
    return total_loss, pen_loss, offset_loss, pixel_loss, kl_loss
@tf.function
def compute_loss(self, params, stroke_gt, image_gt):
    """
    Compute the drawer training loss from decoder outputs.

    :param params: (output_params, latent_mu, latent_logvar) from forward()
    :param stroke_gt: ground-truth stroke-5 sketches
    :param image_gt: ground-truth raster images (shape used for the pixel loss)
    :return: (total_loss, pen_state_loss, mdn_loss, pixel_loss, kl_loss)
    """
    step = tf.cast(self._optimizer.iterations + 1, tf.float32)
    # Anneal the pixel-loss weight up toward its maximum on a fixed step schedule.
    self._pixel_loss_weight.assign(tf.minimum(self._pixel_loss_weight_max,
                                              self._pixel_loss_weight_min +
                                              tf.floor(tf.math.divide_no_nan(step, self._pixel_loss_step)) *
                                              self._pixel_loss_weight_interval))
    # Exponentially decay the rasterization sigma after a warm-up period.
    self._sigma.assign(self._sigma_decay_init * tf.pow(self._sigma_decay_rate,
                                                       tf.floor(tf.maximum(0., step - self._sigma_decay_start) /
                                                                self._sigma_decay_freq)))
    output_params, latent_mu, latent_logvar = params
    pi, mu1, mu2, sigma1, sigma2, rho, pen, pen_logits = output_params
    # Extract parameters: drop the initial token and split stroke-5 columns.
    stroke_gt_cut = stroke_gt[:, 1:, :]
    x1_gt, x2_gt, eos_data, eoc_data, cont_data = tf.split(tf.reshape(stroke_gt_cut, (-1, 5)), 5, 1)
    pen_gt = tf.concat([eos_data, eoc_data, cont_data], 1)
    batch_size = tf.shape(image_gt)[0]
    pixel_dims = tf.shape(image_gt)[1:3]
    # Compute losses
    pen_state_loss_batched = compute_pen_state_loss(pen_logits, pen_gt)
    mdn_loss_batched = compute_mdn_loss(pi, mu1, mu2, sigma1, sigma2, rho, x1_gt, x2_gt, pen_gt)
    pixel_loss_batched = compute_pixel_loss(pi, mu1, mu2, sigma1, sigma2, rho, pen, stroke_gt, batch_size, pixel_dims)
    # Reduce loss vectors and compute total loss.
    pen_state_loss = tf.reduce_mean(pen_state_loss_batched)
    # Drop non-finite MDN terms so a single degenerate sample cannot poison the mean.
    mdn_loss = tf.reduce_mean(tf.boolean_mask(mdn_loss_batched, tf.math.is_finite(mdn_loss_batched)))
    pixel_loss = tf.reduce_mean(pixel_loss_batched)
    # Pixel loss contributes only once its weight exceeds 0.1; MDN loss is
    # down-weighted by the complementary factor.
    reconstruction_loss = (pen_state_loss +
                           (mdn_loss * (1 - self._pixel_loss_weight)) +
                           ((pixel_loss * self._pixel_loss_weight) if self._pixel_loss_weight > 0.1 else 0.))
    # Only compute KL Loss if it is being used.
    if self._kl_weight > 0.:
        kl_loss = -0.5 * tf.reduce_mean(1 + latent_logvar - tf.square(latent_mu) - tf.exp(latent_logvar))
        # Free-bits-style hinge: no penalty below the KL tolerance.
        reconstruction_loss += (tf.maximum(kl_loss, self._kl_tolerance) - self._kl_tolerance) * self._kl_weight
    else:
        kl_loss = 0.
    return reconstruction_loss, pen_state_loss, mdn_loss, pixel_loss, kl_loss
@tf.function
def embed(self, x_image, training=False):
    """
    Encode an image into the latent space.

    :param x_image: batch of input images
    :param training: whether encoder layers run in training mode
    :return: (z, mu, logvar) — sampled latent, posterior mean and log-variance
    """
    latent, _, mean, log_variance = self._encoder(x_image, training=training)
    return latent, mean, log_variance
@tf.function
def forward(self, y_sketch, x_image, training, generation_length=0):
    """
    Full encode/decode pass.

    :param y_sketch: stroke-5 sketch batch (teacher-forcing input)
    :param x_image: image batch to embed
    :param training: training-mode flag; when True, generation_length is
                     derived from the input sketch length
    :param generation_length: decode steps when not training
    :return: (params, mu, logvar, strokes); strokes is None in training mode
    """
    input_sketch = y_sketch[:, :-1, :]
    if training:
        generation_length = input_sketch.shape[1]
    z, init_cell_state, mu, logvar = self._encoder(x_image, training)
    decoder_inputs = (input_sketch, z, init_cell_state, generation_length)
    if training:
        strokes = None
        params = self._decoder(decoder_inputs, training)
    else:
        params, strokes = self._decoder(decoder_inputs, training)
    return params, mu, logvar, strokes
@tf.function
def decode(self, z, training, generation_length=0, with_hyper_states=False):
    """
    Decode sketches from a latent code.

    :param z: latent batch
    :param training: training-mode flag
    :param generation_length: number of decode steps
    :param with_hyper_states: also return the decoder hyper-LSTM states
    :return: params (training), (params, strokes), or (params, strokes, hyper_states)
    """
    init_cell_state = self._encoder._cell_init_state(z)
    # Start decoding from a single "pen down at origin" token per batch element.
    start_token = tf.tile(tf.constant([[[0., 0., 1., 0., 0.]]]), (z.shape[0], 1, 1))
    decoder_inputs = (start_token, z, init_cell_state, generation_length)
    if training:
        return self._decoder(decoder_inputs, training)
    if with_hyper_states:
        params, strokes, hyper_states = self._decoder.call_with_hyper_states(decoder_inputs, training)
        return params, strokes, hyper_states
    params, strokes = self._decoder(decoder_inputs, training)
    return params, strokes
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,399
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/vae_enc_block.py
|
from models import register_model, VAE
from models.subs.conv_block import BlockConv, BlockConvReverse
import tensorflow as tf
try:
import horovod.tensorflow as hvd
except:
hvd = None
@register_model('vae_enc_block')
class VAEEncBlock(VAE):
    """VAE variant whose encoder and decoder use the Conv4 block backbone."""

    def __init__(self, base_dir, model_id, params, training=True):
        """
        Extension of the VAE using a Conv4 backbone.

        :param base_dir: root directory for model artifacts
        :param model_id: identifier for this model instance
        :param params: hyperparameter container
        :param training: whether the model is built for training
        """
        # ----- Init Model ----- #
        super(VAEEncBlock, self).__init__(base_dir, model_id, params, training)

    def _build_model(self):
        """Construct the Conv4 encoder/decoder stacks and the optimizer."""
        encoder_layers = [BlockConv(64) for _ in range(4)]
        encoder_layers.append(tf.keras.layers.GlobalAveragePooling2D())
        self._conv_encoder = tf.keras.Sequential(encoder_layers)

        # Separate heads for the posterior mean and (log-)variance.
        self._mu = tf.keras.layers.Dense(self._latent_size)
        self._var = tf.keras.layers.Dense(self._latent_size)

        # Upsample from the latent grid back to the image resolution, then
        # run three 64-filter reverse blocks and a final one producing the
        # output channels (1 when grayscale, else 3).
        upsample = tf.keras.layers.UpSampling2D((self._png_dim // 16, self._png_dim // 16))
        decoder_layers = [upsample] + [BlockConvReverse(64) for _ in range(3)]
        decoder_layers.append(BlockConvReverse(1 if self._grayscale else 3))
        self._conv_decoder = tf.keras.Sequential(decoder_layers)

        schedule = tf.keras.optimizers.schedules.ExponentialDecay(self._lr, self._lr_decay_step, self._lr_decay_rate)
        self._optimizer = tf.optimizers.Adam(learning_rate=schedule)
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,400
|
alexnwang/SketchEmbedNet-public
|
refs/heads/master
|
/models/subs/conv_block.py
|
import tensorflow as tf
class ResNet12Block(tf.keras.layers.Layer):
    """
    ResNet12 Block from https://arxiv.org/abs/1805.10123 (swish swapped for
    relu): three conv/BN/relu stages plus a 1x1-projected shortcut, followed
    by 2x2 max-pooling and an optional final relu.
    """

    def __init__(self, num_filters, add_relu=True):
        """
        :param num_filters: output channels for every convolution in the block
        :param add_relu: apply a relu after the final max-pool
        """
        super(ResNet12Block, self).__init__()
        self._num_filters = num_filters
        self._add_relu = add_relu

    def build(self, input_shape):
        def make_conv(size):
            # All convolutions share filters/strides/padding; only kernel size varies.
            return tf.keras.layers.Convolution2D(filters=self._num_filters, kernel_size=size, strides=1, padding="SAME")

        self._shortcut_conv = make_conv(1)
        self._shortcut_bn = tf.keras.layers.BatchNormalization()
        self._conv1 = make_conv(3)
        self._bn1 = tf.keras.layers.BatchNormalization()
        self._conv2 = make_conv(3)
        self._bn2 = tf.keras.layers.BatchNormalization()
        self._conv3 = make_conv(3)
        self._bn3 = tf.keras.layers.BatchNormalization()
        self._maxpool = tf.keras.layers.MaxPool2D(pool_size=2, strides=2, padding="SAME")

    def call(self, inputs, training=None, **kwargs):
        relu = tf.keras.activations.relu
        shortcut = self._shortcut_bn(self._shortcut_conv(inputs), training=training)
        x = inputs
        for conv, bn in ((self._conv1, self._bn1), (self._conv2, self._bn2), (self._conv3, self._bn3)):
            x = relu(bn(conv(x), training=training))
        x = self._maxpool(x + shortcut)
        return relu(x) if self._add_relu else x
class ResNet12BlockReverse(tf.keras.layers.Layer):
    """
    Inverse function of ResNet12 block for decoding: transposed convolutions
    with a projected shortcut, then 2x upsampling and an optional relu.
    """

    def __init__(self, num_filters, add_relu=True):
        """
        :param num_filters: output channels for every transposed convolution
        :param add_relu: apply a relu after the final upsampling
        """
        super(ResNet12BlockReverse, self).__init__()
        self._num_filters = num_filters
        self._add_relu = add_relu

    def build(self, input_shape):
        def make_deconv(size):
            # All transposed convolutions share filters/strides/padding.
            return tf.keras.layers.Conv2DTranspose(filters=self._num_filters, kernel_size=size, strides=1, padding="SAME")

        self._shortcut_conv = make_deconv(1)
        self._shortcut_bn = tf.keras.layers.BatchNormalization()
        self._conv1 = make_deconv(3)
        self._bn1 = tf.keras.layers.BatchNormalization()
        self._conv2 = make_deconv(3)
        self._bn2 = tf.keras.layers.BatchNormalization()
        self._conv3 = make_deconv(3)
        self._bn3 = tf.keras.layers.BatchNormalization()
        self._upsample = tf.keras.layers.UpSampling2D(size=2)

    def call(self, inputs, training=None, **kwargs):
        relu = tf.keras.activations.relu
        shortcut = self._shortcut_bn(self._shortcut_conv(inputs), training=training)
        x = inputs
        for conv, bn in ((self._conv1, self._bn1), (self._conv2, self._bn2), (self._conv3, self._bn3)):
            x = relu(bn(conv(x), training=training))
        x = self._upsample(x + shortcut)
        return relu(x) if self._add_relu else x
class BlockConv(tf.keras.layers.Layer):
    """
    Single block of common Conv4 architecture from
    https://arxiv.org/abs/1703.03400: conv -> batch-norm -> relu -> max-pool.
    """

    def __init__(self, num_filters):
        """
        :param num_filters: number of convolution output channels
        """
        super(BlockConv, self).__init__()
        self._num_filters = num_filters

    def build(self, input_shape):
        self._conv1 = tf.keras.layers.Conv2D(self._num_filters, kernel_size=3, strides=1, padding="SAME")
        self._bnorm1 = tf.keras.layers.BatchNormalization()
        self._pool1 = tf.keras.layers.MaxPool2D()  # No actual trainable weights

    def call(self, inputs, training=None, **kwargs):
        hidden = self._bnorm1(self._conv1(inputs), training=training)
        return self._pool1(tf.keras.activations.relu(hidden))
class BlockConvReverse(tf.keras.layers.Layer):
    """
    Reverse, decoding block for the BlockConv layer: transposed conv ->
    batch-norm -> relu -> 2x upsampling.
    """

    def __init__(self, num_filters):
        """
        :param num_filters: number of transposed-convolution output channels
        """
        super(BlockConvReverse, self).__init__()
        self._num_filters = num_filters

    def build(self, input_shape):
        self._conv = tf.keras.layers.Conv2DTranspose(self._num_filters, kernel_size=3, strides=1, padding="SAME")
        self._bnorm = tf.keras.layers.BatchNormalization()
        self._upsample = tf.keras.layers.UpSampling2D(size=2)

    def call(self, inputs, training=None, **kwargs):
        hidden = self._bnorm(self._conv(inputs), training=training)
        return self._upsample(tf.keras.activations.relu(hidden))
|
{"/run_hyper_embedding_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/util/utils.py": ["/util/quickdraw_utils.py"], "/configs/sketchy_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/datasets/base/__init__.py": ["/datasets/base/dataset_base.py", "/datasets/base/dataset_episodic.py", "/datasets/base/datasets.py"], "/configs/quickdraw_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/configs/vae_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/configs/drawer_configs.py": ["/util/__init__.py", "/configs/base/__init__.py"], "/models/lr_fs.py": ["/models/__init__.py"], "/util/__init__.py": ["/util/logging.py", "/util/utils.py", "/util/quickdraw_utils.py", "/util/fs_omniglot_utils.py", "/util/sketchy_utils.py", "/util/drawer_utils.py", "/util/write_routines.py", "/util/augmentations.py"], "/configs/base/__init__.py": ["/configs/base/configs.py"], "/util/sketchy_utils.py": ["/util/__init__.py"], "/run_compositionality_exp.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/vae.py": ["/models/base/__init__.py", "/models/subs/conv_block.py", "/util/__init__.py", "/util/write_routines.py"], "/datasets/quickdraw.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/miniimagenet_configs.py": ["/configs/__init__.py", "/util/__init__.py"], "/models/subs/decoders.py": ["/models/subs/cells.py", "/util/__init__.py"], "/run_experiment.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/configs/__init__.py": ["/configs/drawer_configs.py", "/configs/vae_configs.py", "/configs/classifier_configs.py", "/configs/quickdraw_configs.py", "/configs/sketchy_configs.py", "/configs/miniimagenet_configs.py", "/configs/base/__init__.py"], "/datasets/__init__.py": ["/datasets/quickdraw.py", "/datasets/fs_omniglot_vinyals.py", "/datasets/sketchy.py", 
"/datasets/miniimagenet.py", "/datasets/base/__init__.py"], "/util/drawer_utils.py": ["/util/utils.py"], "/models/drawer_enc_block.py": ["/models/__init__.py", "/models/subs/encoders.py"], "/models/subs/encoders.py": ["/models/subs/conv_block.py"], "/prepare_data.py": ["/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/run_full_eval.py": ["/models/__init__.py", "/configs/__init__.py", "/datasets/__init__.py", "/util/__init__.py"], "/models/__init__.py": ["/models/drawer.py", "/models/drawer_enc_block.py", "/models/classifier.py", "/models/vae.py", "/models/vae_enc_block.py", "/models/base/__init__.py", "/models/lr_fs.py"], "/datasets/miniimagenet.py": ["/datasets/__init__.py"], "/util/write_routines.py": ["/util/__init__.py"], "/models/classifier.py": ["/models/drawer.py", "/models/vae.py", "/models/base/__init__.py", "/util/__init__.py"], "/datasets/base/dataset_episodic.py": ["/datasets/base/dataset_base.py"], "/datasets/sketchy.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/datasets/fs_omniglot_vinyals.py": ["/datasets/base/__init__.py", "/util/__init__.py"], "/configs/classifier_configs.py": ["/configs/base/__init__.py", "/util/__init__.py"], "/models/base/__init__.py": ["/models/base/model_base.py", "/models/base/models.py"], "/models/drawer.py": ["/models/base/__init__.py", "/models/subs/decoders.py", "/models/subs/encoders.py", "/util/__init__.py", "/util/write_routines.py"], "/models/vae_enc_block.py": ["/models/__init__.py", "/models/subs/conv_block.py"]}
|
38,406
|
borovskiy/defining_form_filling
|
refs/heads/main
|
/app.py
|
from validators import list_name_template
from flask import Flask, request
from tinydb import TinyDB
# Flask application and the TinyDB file that stores the known form templates.
app = Flask(__name__)
db = TinyDB('base.db')
@app.route('/get_form', methods=['POST'])
def index():
    """Match posted form fields against stored templates.

    Returns the best-matching template name (or a per-field type breakdown
    when nothing matches), or the literal string 'None' when no query
    arguments were supplied.
    """
    # The route only accepts POST, so the original `request.method == 'POST'`
    # check was dead code and has been removed.
    if request.args:
        return list_name_template(db=db.all(), request_args=request.args)
    return 'None'
if __name__ == '__main__':
    # Development entry point; debug server only, not for production.
    app.run(debug=True)
|
{"/app.py": ["/validators.py"], "/test_requests.py": ["/app.py"]}
|
38,407
|
borovskiy/defining_form_filling
|
refs/heads/main
|
/test_requests.py
|
from app import app
import unittest
class BasicTestCase(unittest.TestCase):
    """Integration tests for the /get_form endpoint of the Flask app."""

    def setUp(self) -> None:
        self.url = 'http://127.0.0.1:5000/get_form?'

    def test_no_valid_response(self):
        # An email + phone pair matches no template, so the endpoint
        # answers with the plain-text 'None' (no JSON body).
        client = app.test_client(self)
        query = 'f_name1=va@mail.ru&f_name2=+79503886510'
        response = client.post(self.url + query, content_type='html/text')
        self.assertEqual(response.json, None)

    def test_valid_response(self):
        # A plain string + phone pair produces a JSON dict response.
        client = app.test_client(self)
        query = 'f_name1=string&f_name2=+79503886510'
        response = client.post(self.url + query, content_type='html/text')
        self.assertEqual(type(response.json), dict)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
{"/app.py": ["/validators.py"], "/test_requests.py": ["/app.py"]}
|
38,408
|
borovskiy/defining_form_filling
|
refs/heads/main
|
/validators.py
|
import datetime
def telephone_check(enter_string_data: str) -> bool:
    """Check whether a string looks like a Russian telephone number.

    Accepted prefixes are '8', '7' and '+7'; everything after the prefix
    must consist of digits only.

    :param enter_string_data: candidate phone-number string
    :return: True for a plausible phone number, else False
    """
    # Guard: the original indexed [0] directly and raised IndexError on "".
    if not enter_string_data:
        return False
    if enter_string_data[0] in ('7', '8'):
        truncated_number = enter_string_data[1:]
    elif enter_string_data.startswith('+7'):
        # Drop both the '+' and the country code.
        truncated_number = enter_string_data[2:]
    else:
        return False
    # Bug fix: the old `s[0:2].strip() in ['7', '+7']` test could never match
    # a plain '7'-prefixed number, because s[0:2] is two digits for those.
    return truncated_number.isdigit()
def validate_email(string: str) -> bool:
    """Loosely validate an e-mail address of the form name@domain.tld.

    :param string: candidate e-mail string
    :return: True if the string passes the checks, else False
    """
    string_after_point = string.split('.')[-1]  # TLD candidate
    string_after_dog = string.split('@')[-1].replace('.' + string_after_point, '')  # domain without TLD
    if string_after_point.isalpha() and string_after_dog.isalpha():
        name_email = string.split('@')[0]
        # Bug fix: an empty local part (e.g. '@mail.ru') used to raise
        # IndexError on name_email[-1].
        if not name_email:
            return False
        # NOTE(review): .find('._-') looks for the literal substring '._-';
        # if the intent was "none of '.', '_', '-' anywhere in the name",
        # this check is too lax — confirm with the author.
        if name_email.find('._-') == -1 and name_email[-1] not in ['.', '-', '_', ] and name_email[0] not in ['.', '-',
                                                                                                              '_', ]:
            return True
    return False
def date_check(enter_string_date: str) -> bool:
    """Check whether a string is a valid date in 'DD.MM.YYYY' or 'YYYY.MM.DD' form.

    :param enter_string_date: candidate date string
    :return: True for a valid date, else False
    """
    try:
        if len(enter_string_date) == 10 and enter_string_date.count('.') == 2:
            if enter_string_date[:6].count('.') == 2:
                # Both separators appear early: day-first layout.
                datetime.datetime.strptime(enter_string_date, '%d.%m.%Y')
                return True
            elif enter_string_date[:6].count('.') == 1:
                # Year-first layout.
                datetime.datetime.strptime(enter_string_date, '%Y.%m.%d')
                return True
        # Bug fix: the original fell off the end and returned None (not False)
        # for strings that are not 10 chars or lack exactly two dots.
        return False
    except ValueError:
        # strptime rejects impossible dates such as '32.13.2020'.
        return False
def check_data(value_data: str) -> str:
    """Classify a form value as one of 'date', 'phone', 'email' or 'string'.

    :param value_data: raw field value
    :return: the detected field type
    """
    if date_check(value_data):
        return 'date'
    if telephone_check(value_data):
        return 'phone'
    # Bug fix: the original condition was `'.' and '@' in value_data`, which
    # only tested for '@' ('.' is a truthy constant). Both characters must be
    # present before attempting e-mail validation.
    if '.' in value_data and '@' in value_data and validate_email(value_data):
        return 'email'
    return 'string'
def counting_field_types(dict_request_args):
    """Count how many request fields fall into each detected type.

    :param dict_request_args: mapping of field name -> raw value
    :return: dict with counts for 'string', 'email', 'phone' and 'date'
    """
    dict_count_data = {'string': 0, 'email': 0, 'phone': 0, 'date': 0, }
    for value in dict(dict_request_args).values():
        # check_data only ever returns one of the four keys above.
        dict_count_data[check_data(value)] += 1
    return dict_count_data
def list_name_template(db, request_args) -> dict or str:
    """Pick the stored form template matching the posted fields.

    :param db: list of stored templates (dicts whose values are field types
               and which carry a 'name' key)
    :param request_args: posted field mapping
    :return: the name of the largest matching template, or — when nothing
             matches — a dict mapping each field name to {value: detected type}
    """
    request_args = dict(request_args)
    matching_templates = {}
    posted_counts = counting_field_types(request_args)
    for template in db:
        field_types = list(template.values())
        # A template matches when the request supplies at least as many
        # fields of every type as the template requires.
        required = {kind: field_types.count(kind) for kind in ('email', 'phone', 'date', 'string')}
        if all(posted_counts[kind] >= needed for kind, needed in required.items()):
            matching_templates[template['name']] = template
    if matching_templates:
        return max_len_template(matching_templates)
    # No template matched: report each posted value with its detected type.
    return {key: {value: check_data(value)} for key, value in request_args.items()}
def max_len_template(dicts: dict) -> str:
    """Select the name of the largest template among the matched ones.

    :param dicts: mapping of template name -> template dict
    :return: the 'name' value of the template with the most fields
    """
    biggest = {}
    for candidate in dicts.values():
        # Strictly greater keeps the earliest template on ties, as before.
        if len(candidate) > len(biggest):
            biggest = candidate
    return biggest['name']
|
{"/app.py": ["/validators.py"], "/test_requests.py": ["/app.py"]}
|
38,410
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/path_history.py
|
import logging
import weakref
import simuvex
import claripy
l = logging.getLogger("angr.path_history")
class PathHistory(object):
    """
    This class keeps track of historically-relevant information for paths.

    Each node records what happened during one execution step (jumpkind,
    guard, target, block addresses, events, constraints) and links to its
    parent, forming a singly-linked chain back to the root of the path.
    """
    # __slots__ keeps the per-node footprint small; one node is created per step.
    __slots__ = ('_parent', 'merged_from', 'merge_conditions', 'length', 'extra_length', '_addrs', '_runstr', '_target', '_guard', '_jumpkind', '_events', '_jump_source', '_jump_avoidable', '_all_constraints', '_fresh_constraints', '_satisfiable', '_state_ref', '__weakref__')

    def __init__(self, parent=None):
        """
        :param parent: the preceding PathHistory node, or None for a root node
        """
        self._parent = parent
        self.merged_from = [ ]       # histories this node was merged from
        self.merge_conditions = [ ]  # guard conditions used for the merge
        self._runstr = None
        self._target = None
        self._jump_source = None
        self._jump_avoidable = None
        self._guard = None
        self._jumpkind = None
        self._events = None
        self._addrs = ()
        self.length = 0 if parent is None else parent.length + 1
        self.extra_length = 0 if parent is None else parent.extra_length
        # satness stuff: cached constraint sets and satisfiability verdict
        self._all_constraints = ()
        self._fresh_constraints = ()
        self._satisfiable = None
        # the state itself (held via weakref; see _record_state)
        self._state_ref = None

    def copy(self):
        """Return a shallow copy of this node (shares the same parent link)."""
        c = PathHistory()
        c._parent = self._parent
        c.merged_from = list(self.merged_from)
        c.merge_conditions = list(self.merge_conditions)
        c._runstr = self._runstr
        c._target = self._target
        c._jump_source = self._jump_source
        c._jump_avoidable = self._jump_avoidable
        c._guard = self._guard
        c._jumpkind = self._jumpkind
        c._events = self._events
        c._addrs = self._addrs
        c.length = self.length
        c.extra_length = self.extra_length
        c._all_constraints = list(self._all_constraints)
        c._fresh_constraints = list(self._fresh_constraints)
        c._satisfiable = self._satisfiable
        c._state_ref = self._state_ref
        return c

    def __getstate__(self):
        # Exclude the weakref machinery and the (unpicklable) state reference.
        return [
            (k, getattr(self, k)) for k in self.__slots__ if k not in
            ('__weakref__', '_state_ref')
        ]

    def __setstate__(self, state):
        for k,v in state:
            setattr(self, k, v)
        # The referenced state is never pickled; restored nodes start without one.
        self._state_ref = None

    def _record_state(self, state):
        """Snapshot the relevant pieces of a simuvex state into this node."""
        self._jumpkind = state.scratch.jumpkind
        self._jump_source = state.scratch.source
        self._jump_avoidable = state.scratch.avoidable
        self._target = state.scratch.target
        self._guard = state.scratch.guard
        if state.scratch.bbl_addr_list is not None:
            self._addrs = state.scratch.bbl_addr_list
        elif state.scratch.bbl_addr is not None:
            self._addrs = [ state.scratch.bbl_addr ]
        else:
            # state.scratch.bbl_addr may not be initialized as final states from the "flat_successors" list. We need to get
            # the value from _target in that case.
            # NOTE(review): self.addr raises IndexError when _addrs is still ()
            # — confirm this branch is only reached after _addrs has been set.
            if self.addr is None and not self._target.symbolic:
                self._addrs = [ self._target._model_concrete.value ]
            else:
                # FIXME: redesign so this does not happen
                l.warning("Encountered a path to a SimProcedure with a symbolic target address.")
        if simuvex.o.UNICORN in state.options:
            # Unicorn can execute several blocks per step; account for the extras.
            self.extra_length += state.scratch.executed_block_count - 1
        if simuvex.o.TRACK_ACTION_HISTORY in state.options:
            self._events = state.log.events
        # record constraints, added constraints, and satisfiability
        self._all_constraints = state.se.constraints
        self._fresh_constraints = [
            ev.constraint.ast for ev in state.log.events if isinstance(ev, simuvex.SimActionConstraint)
        ]
        if isinstance(state.se._solver, claripy.frontend_mixins.SatCacheMixin):
            self._satisfiable = state.se._solver._cached_satness
        else:
            self._satisfiable = None
        # record the state as a weak reference so the history does not keep it alive
        self._state_ref = weakref.ref(state)

    def _record_run(self, run):
        # Keep only the textual representation of the run.
        self._runstr = str(run)

    @property
    def state(self):
        # Dereference the weakref; None if the state has been garbage-collected.
        return self._state_ref() if self._state_ref is not None else None

    #
    # Some GC-dependent pass-throughs to the state
    #

    @property
    def events(self):
        # Prefer recorded events; fall back to the live state if still alive.
        if self._events is not None:
            return self._events
        elif self.state is not None:
            return self.state.log.events
        else:
            return ()

    def reachable(self):
        """Return (and memoize) whether this node's constraints are satisfiable."""
        if self._satisfiable is not None:
            pass
        elif self.state is not None:
            # Ask the still-alive state's solver directly.
            self._satisfiable = self.state.se.satisfiable()
        else:
            # State is gone: re-solve from the recorded constraints.
            solver = claripy.Solver()
            solver.add(self._all_constraints)
            self._satisfiable = solver.satisfiable()
        return self._satisfiable

    @property
    def actions(self):
        # Subset of events that are SimActions.
        return [ ev for ev in self.events if isinstance(ev, simuvex.SimAction) ]

    @property
    def addr(self):
        # First recorded block address.
        # NOTE(review): raises IndexError when _addrs is empty — confirm callers guard this.
        return self._addrs[0]

    @addr.setter
    def addr(self, v):
        self._addrs = [ v ]

    def closest_common_ancestor(self, other):
        """
        Find the common ancestor between this PathHistory and 'other'.

        :param other: the PathHistory to find a common ancestor with.
        :return: the common ancestor PathHistory, or None if there isn't one
        """
        # Walk both chains root-first in lockstep, recording every node seen;
        # the first node encountered twice is the closest common ancestor.
        our_history_iter = reversed(HistoryIter(self))
        their_history_iter = reversed(HistoryIter(other))
        sofar = set()
        while True:
            our_done = False
            their_done = False
            try:
                our_next = next(our_history_iter)
                if our_next in sofar:
                    # we found it!
                    return our_next
                sofar.add(our_next)
            except StopIteration:
                # we ran out of items during iteration
                our_done = True
            try:
                their_next = next(their_history_iter)
                if their_next in sofar:
                    # we found it!
                    return their_next
                sofar.add(their_next)
            except StopIteration:
                # we ran out of items during iteration
                their_done = True
            # if we ran out of both lists, there's no common ancestor
            if our_done and their_done:
                return None

    def constraints_since(self, other):
        """
        Returns the constraints that have been accumulated since `other`.

        :param other: a prior PathHistory object
        :returns: a list of constraints
        """
        constraints = [ ]
        cur = self
        while cur is not other and cur is not None:
            constraints.extend(cur._fresh_constraints)
            cur = cur._parent
        return constraints
class TreeIter(object):
    """
    Base iterator over a parent-linked chain of PathHistory nodes.

    Subclasses implement __reversed__ (newest-to-oldest order); forward
    iteration is derived from it via the materialized `hardcopy` list.
    """

    def __init__(self, start, end=None):
        """
        :param start: the newest node to start from
        :param end: exclusive sentinel node (None walks to the root)
        """
        self._start = start
        self._end = end

    def _iter_nodes(self):
        # Walk the parent chain from start up to (but excluding) end.
        node = self._start
        while node is not self._end:
            yield node
            node = node._parent

    def __iter__(self):
        for element in self.hardcopy:
            yield element

    def __reversed__(self):
        raise NotImplementedError("Why are you using this class")

    @property
    def hardcopy(self):
        # Realize the reverse iteration, then flip it back into forward order.
        return list(reversed(tuple(reversed(self))))

    def __len__(self):
        return len(self.hardcopy)

    def __getitem__(self, k):
        # Only negative indexing is supported, counting from the newest item.
        if isinstance(k, slice):
            raise ValueError("Please use .hardcopy to use slices")
        if k >= 0:
            raise ValueError("Please use .hardcopy to use nonnegative indexes")
        pos = 0
        for item in reversed(self):
            pos -= 1
            if pos == k:
                return item
        raise IndexError(k)

    def count(self, v):
        """
        Count occurrences of value v in the entire history. Note that the subclass must implement the __reversed__
        method, otherwise an exception will be thrown.

        :param object v: The value to look for
        :return: The number of occurrences
        :rtype: int
        """
        return sum(1 for item in reversed(self) if item == v)
class HistoryIter(TreeIter):
    """Reverse-iterates the raw PathHistory nodes themselves."""
    def __reversed__(self):
        return self._iter_nodes()
class AddrIter(TreeIter):
    """Reverse-iterates every block address recorded along the history."""
    def __reversed__(self):
        for node in self._iter_nodes():
            for address in reversed(node._addrs):
                yield address
class RunstrIter(TreeIter):
    """Reverse-iterates the recorded run strings, skipping unset ones."""
    def __reversed__(self):
        return (node._runstr for node in self._iter_nodes() if node._runstr is not None)
class TargetIter(TreeIter):
    """Iterate the _target of every node that has one set."""
    def __reversed__(self):
        return (node._target for node in self._iter_nodes()
                if node._target is not None)
class GuardIter(TreeIter):
    """Iterate the _guard of every node that has one set."""
    def __reversed__(self):
        return (node._guard for node in self._iter_nodes()
                if node._guard is not None)
class JumpkindIter(TreeIter):
    """Iterate the _jumpkind of every node that has one set."""
    def __reversed__(self):
        return (node._jumpkind for node in self._iter_nodes()
                if node._jumpkind is not None)
class EventIter(TreeIter):
    """Iterate each node's events, newest-first when reversed."""
    def __reversed__(self):
        return (ev for node in self._iter_nodes() for ev in reversed(node.events))
class ActionIter(TreeIter):
    """Iterate each node's actions, newest-first when reversed."""
    def __reversed__(self):
        return (act for node in self._iter_nodes() for act in reversed(node.actions))
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,411
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/tablespecs.py
|
import claripy
class StringSpec(object):
    """
    Deprecated shim for building string-like claripy ASTs.

    ``__new__`` never returns a StringSpec instance: depending on which
    keyword is supplied it returns a concrete AST (from `string`), a fresh
    symbolic bitvector (from `sym_length`), or a concatenation (from
    `concat`).  NOTE(review): if none of the three is supplied, __new__
    falls through without a return and the "instance" is None.
    """
    def __new__(cls, string=None, sym_length=None, concat=None, name=None, nonnull=False):
        # Python 2 print statements; this module predates py3 support.
        print 'StringSpec is deprecated! Please use raw claripy ASTs or else face my wrath.'
        if nonnull:
            print 'Additional deprecation warning: nonnull completely doesn\'t work in our hacked up support shint for StringSpec. Please just fix your code. Please.'
        if string is not None:
            # Concrete string -> AST of concatenated 8-bit BVVs.
            return StringTableSpec.string_to_ast(string)
        if sym_length is not None:
            if name is None:
                name = 'stringspec_sym_%d' % sym_length
            # Fully symbolic string of sym_length bytes (8 bits per byte).
            return claripy.BVS(name, sym_length * 8)
        if concat is not None:
            return claripy.Concat(*concat)
class StringTableSpec(object):
    """
    Builds an argv/envp-style table: a run of NUL-terminated strings plus
    an array of pointers into them, laid out together and written into a
    state's memory by :meth:`dump`.
    """
    @staticmethod
    def string_to_ast(string):
        # One 8-bit BVV per character, concatenated into a single AST.
        return claripy.Concat(*(claripy.BVV(ord(c), 8) for c in string))
    def __init__(self):
        self._contents = []   # ordered ('string', ast) / ('pointer', value) entries
        self._str_len = 0     # total bytes of string data, NUL terminators included
    def add_string(self, string):
        """Append a NUL-terminated string entry (literal str or claripy AST)."""
        if isinstance(string, str):
            self._contents.append(('string', self.string_to_ast(string+'\0')))
            self._str_len += len(string) + 1
        elif isinstance(string, claripy.ast.Bits):
            self._contents.append(('string', string.concat(claripy.BVV(0, 8))))
            # len(ast) is in bits; +1 accounts for the appended NUL byte.
            # (Python 2 integer division.)
            self._str_len += len(string) / 8 + 1
        else:
            raise ValueError('String must be either string literal or claripy AST')
    def add_pointer(self, pointer):
        """Append a raw pointer entry, stored as-is in the pointer array."""
        self._contents.append(('pointer', pointer))
    def add_null(self):
        # A NULL pointer conventionally terminates argv/envp tables.
        self.add_pointer(0)
    def dump(self, state, end_addr, align=0x10):
        """
        Write the table into `state`'s memory, ending at `end_addr`.

        Layout, low to high addresses: pointer array, then string data,
        then zero fill up to `end_addr` so the start address is aligned.

        :param state:    the state whose memory is written
        :param end_addr: highest address of the table (int/long or BVV)
        :param align:    alignment for the returned start address
        :returns: the start address of the pointer array
        """
        if isinstance(end_addr, (int, long)):
            end_addr = state.se.BVV(end_addr, state.arch.bits)
        ptr_size = len(self._contents) * state.arch.bytes
        size = self._str_len + ptr_size
        start_addr = end_addr - size
        # Shift down by the remainder so start_addr lands on an `align` boundary;
        # the displaced bytes become zero fill just below end_addr.
        zero_fill = state.se.any_int(start_addr % align)
        start_addr -= zero_fill
        start_str = start_addr + ptr_size
        ptr_i = start_addr
        str_i = start_str
        for itemtype, item in self._contents:
            if itemtype == 'string':
                # Pointer slot references where this string's data is stored.
                state.memory.store(ptr_i, str_i, endness=state.arch.memory_endness)
                state.memory.store(str_i, item)
                ptr_i += state.arch.bytes
                str_i += len(item)/8
            else:
                if isinstance(item, (int, long)):
                    item = state.se.BVV(item, state.arch.bits)
                state.memory.store(ptr_i, item, endness=state.arch.memory_endness)
                ptr_i += state.arch.bytes
        if zero_fill != 0:
            # Zero the alignment padding at the top of the table.
            state.memory.store(end_addr - zero_fill, state.se.BVV(0, 8*zero_fill))
        return start_addr
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,412
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/surveyors/caller.py
|
from .explorer import Explorer
import simuvex
class Callable(object):
    """
    Callable is a representation of a function in the binary that can be
    interacted with like a native python function.
    If you set perform_merge=True (the default), the result will be returned to you, and
    you can get the result state with callable.result_state.
    Otherwise, you can get the resulting path group (immutable) at callable.result_path_group.
    """
    def __init__(self, project, addr, concrete_only=False, perform_merge=True, base_state=None, toc=None, cc=None):
        """
        :param project: The project to operate on
        :param addr: The address of the function to use
        The following parameters are optional:
        :param concrete_only: Throw an exception if the execution splits into multiple paths
        :param perform_merge: Merge all result states into one at the end (only relevant if concrete_only=False)
        :param base_state: The state from which to do these runs
        :param toc: The address of the table of contents for ppc64
        :param cc: The SimCC to use for a calling convention
        """
        self._project = project
        self._addr = addr
        self._concrete_only = concrete_only
        self._perform_merge = perform_merge
        self._base_state = base_state
        self._toc = toc
        self._caller = None
        # Default to the architecture's standard calling convention when none is given.
        self._cc = cc if cc is not None else simuvex.DefaultCC[project.arch.name](project.arch)
        # Returning to this address is how we detect that the call finished.
        self._deadend_addr = project._simos.return_deadend
        self.result_path_group = None
        self.result_state = None
    def set_base_state(self, state):
        """
        Swap out the state you'd like to use to perform the call
        :param state: The state to use to perform the call
        """
        self._base_state = state
    def __call__(self, *args):
        # Run the function, then extract and simplify its return value per the
        # calling convention; None when no merged result state is available.
        self.perform_call(*args)
        if self.result_state is not None:
            return self.result_state.se.simplify(self._cc.get_return_val(self.result_state, stack_base=self.result_state.regs.sp - self._cc.STACKARG_SP_DIFF))
        else:
            return None
    def perform_call(self, *args):
        """
        Execute the function with `args`, populating result_path_group and,
        when merging is enabled, result_state.
        """
        state = self._project.factory.call_state(self._addr, *args,
                    cc=self._cc,
                    base_state=self._base_state,
                    ret_addr=self._deadend_addr,
                    toc=self._toc)
        def step_func(pg):
            # Prune unsatisfiable paths each step; in concrete-only mode a
            # genuine split into multiple live paths is an error.
            pg2 = pg.prune()
            if len(pg2.active) > 1:
                raise AngrCallableMultistateError("Execution split on symbolic condition!")
            return pg2
        caller = self._project.factory.path_group(state, immutable=True)
        # Step until no path is active, then recover the deadended paths.
        caller_end_unpruned = caller.step(until=lambda pg: len(pg.active) == 0, step_func=step_func if self._concrete_only else None).unstash(from_stash='deadended')
        # Keep only paths that actually returned to the deadend address.
        caller_end_unmerged = caller_end_unpruned.prune(filter_func=lambda pt: pt.addr == self._deadend_addr)
        if len(caller_end_unmerged.active) == 0:
            raise AngrCallableError("No paths returned from function")
        self.result_path_group = caller_end_unmerged
        if self._perform_merge:
            caller_end = caller_end_unmerged.merge()
            self.result_state = caller_end.active[0].state
class Caller(Explorer):
    """
    Caller is a surveyor that executes functions to see what they do.
    """
    def __init__(self, project, addr, args=(), start=None, num_find=None, concrete_only=False, **kwargs):
        """
        :param project: the project
        :param addr: the address to start calling at
        :param args: a tuple of arguments. Any members that are None will be replaced with symbolic expressions with a
                     length of the architecture's bitwidth.
        :param start: a path (or set of paths) to start from
        :param num_find: find at least this many returns from the function
        :param concrete_only: Throw an exception if the execution splits into multiple paths
        """
        # The project entry point is reused as a fake return address; reaching
        # it is the Explorer's "find" condition for a completed call.
        self._fake_return_addr = project.entry
        self._cc = simuvex.DefaultCC[project.arch.name](project.arch)
        self._concrete_only = concrete_only
        # Normalize `start` into a list of paths.
        start_paths = [ ]
        if start is None:
            start_paths.append(project.factory.path(project.factory.blank_state(addr=addr)))
        elif isinstance(start, (tuple,list,set)):
            start_paths.extend(start)
        else:
            start_paths.append(start)
        # None placeholders become unconstrained symbolic values of register width.
        self.symbolic_args = [ start_paths[0].state.se.Unconstrained('arg%d'%i, project.arch.bits) if arg is None else arg for i, arg in enumerate(args) ]
        self._ret_addr = start_paths[0].state.se.BVV(self._fake_return_addr, project.arch.bits)
        # Point every start path at the callee and lay out its call site.
        for p in start_paths:
            p.state.ip = addr
            self._cc.setup_callsite(p.state, self._ret_addr, self.symbolic_args)
        super(Caller, self).__init__(project, find=self._fake_return_addr, start=start_paths, num_find=num_find, **kwargs)
    def post_tick(self):
        # In concrete-only mode, drop unsatisfiable paths after each tick and
        # fail if more than one satisfiable successor remains.
        if not self._concrete_only: return
        if len(self.active) > 1:
            toomany = self.active
            self.active = []
            for path in toomany:
                if path.state.satisfiable():
                    self.active.append(path)
                else:
                    self.errored.append(path)
            if len(self.active) > 1:
                raise AngrCallableMultistateError("Execution produced multiple successors")
    def map_se(self, func, *args, **kwargs):
        """
        Maps the state.se."func" function for all the return address states. This is a generator.
        :param func: the function name, used as getattr(p.state.se, func). Normally any_n_int or any_n_str
        :param runs: the maximum number of runs to execute
        :param solutions: check only returns with this value as a possible solution
        :param sort: sort the result before yielding it
        Other *args and **kwargs are passed to the called state.se.* function.
        yields (r, func_return) for each state.
        """
        runs = kwargs.pop('runs', None)
        solution = kwargs.pop('solution', None)
        extra_constraints = kwargs.pop('extra_constraints', ())
        sort = kwargs.pop('sort', True)
        for r,p in self.iter_returns(runs=runs, solution=solution):
            # Constrain the return value to the requested solution while evaluating.
            v = getattr(p.state.se, func)(*args, extra_constraints=extra_constraints + (r==solution,), **kwargs)
            yield r, sorted(v) if sort else v
    def map_func(self, func, runs=None, solution=None):
        """
        Calls func(return_value, args_tuple, path) for each function return. This is a generator.
        :param func: the function to call
        :param runs: the maximum number of runs to execute
        :param solutions: check only returns with this value as a possible solution
        yields the return values of func
        """
        for r,p in self.iter_returns(runs=runs, solution=solution):
            yield func(r, self.symbolic_args, p)
    def iter_returns(self, runs=None, solution=None):
        """
        Yields (return_value, path) for every return. This is a generator.
        :param runs: the maximum number of runs to execute
        :param solutions: check only returns with this value as a possible solution
        """
        for p in self.iter_found(runs=runs):
            # Simplified return value per the calling convention.
            r = p.state.se.simplify(self._cc.return_val.get_value(p.state))
            # Skip paths whose return value cannot equal the requested solution.
            if solution is not None and not p.state.se.solution(r, solution):
                continue
            yield (r, p)
    __iter__ = iter_returns
from ..errors import AngrCallableError, AngrCallableMultistateError
from . import all_surveyors
all_surveyors['Caller'] = Caller
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,413
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_argv.py
|
import nose
import angr, claripy
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_mips():
    """argv handling on MIPS: concrete match, concrete mismatch, symbolic arg."""
    proj = angr.Project(test_location + "/mips/argv_test")
    r_addr = 0x400768
    # A matching concrete argv[1] reaches the target exactly once.
    path = proj.factory.path(args=['aaa', "Yan is a noob"], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 1)
    # A non-matching concrete argv[1] never reaches it.
    path = proj.factory.path(args=['aaa', 'Yan is not a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 0)
    # Symbolic argv[1]: the solver must concretize it to the magic string.
    path = proj.factory.path(args=['aaa', claripy.BVS('arg_2', 50 * 8)], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    hit = explorer.found[0]
    stack = hit.state.memory.load(hit.state.registers.load('sp'), 400)
    conc = hit.state.se.any_str(stack)
    nose.tools.assert_equals("Yan is a noob" in conc, True)
def test_mipsel():
    """argv handling on MIPSEL: concrete match, concrete mismatch, symbolic arg."""
    proj = angr.Project(test_location + "/mipsel/argv_test")
    r_addr = 0x400768
    # A matching concrete argv[1] reaches the target exactly once.
    path = proj.factory.path(args=['aaa', 'Yan is a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 1)
    # A non-matching concrete argv[1] never reaches it.
    path = proj.factory.path(args=['aaa', 'Yan is not a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 0)
    # Symbolic argv[1]: the solver must concretize it to the magic string.
    path = proj.factory.path(args=['aaa', claripy.BVS('arg_2', 50 * 8)], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    hit = explorer.found[0]
    stack = hit.state.memory.load(hit.state.registers.load('sp'), 400)
    conc = hit.state.se.any_str(stack)
    nose.tools.assert_equals("Yan is a noob" in conc, True)
def test_i386():
    """argv handling on i386: concrete match, concrete mismatch, symbolic arg."""
    proj = angr.Project(test_location + "/i386/argv_test")
    r_addr = 0x804845B
    # A matching concrete argv[1] reaches the target exactly once.
    path = proj.factory.path(args=['aaa', 'Yan is a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 1)
    # A non-matching concrete argv[1] never reaches it.
    path = proj.factory.path(args=['aaa', 'Yan is not a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 0)
    # Symbolic argv[1]: the solver must concretize it to the magic string.
    path = proj.factory.path(args=['aaa', claripy.BVS('arg_2', 50 * 8)], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    hit = explorer.found[0]
    stack = hit.state.memory.load(hit.state.registers.load('sp'), 400)
    conc = hit.state.se.any_str(stack)
    nose.tools.assert_equals("Yan is a noob" in conc, True)
def test_amd64():
    """argv handling on x86_64: concrete match, concrete mismatch, symbolic arg."""
    proj = angr.Project(test_location + "/x86_64/argv_test")
    r_addr = 0x400571
    # A matching concrete argv[1] reaches the target exactly once.
    path = proj.factory.path(args=['aaa', 'Yan is a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 1)
    # A non-matching concrete argv[1] never reaches it.
    path = proj.factory.path(args=['aaa', 'Yan is not a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 0)
    # Symbolic argv[1]: the solver must concretize it to the magic string.
    path = proj.factory.path(args=['aaa', claripy.BVS('arg_2', 50 * 8)], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    hit = explorer.found[0]
    stack = hit.state.memory.load(hit.state.registers.load('sp'), 400)
    conc = hit.state.se.any_str(stack)
    nose.tools.assert_equals("Yan is a noob" in conc, True)
def test_arm():
    """argv handling on ARM (armel): concrete match, concrete mismatch, symbolic arg."""
    proj = angr.Project(test_location + "/armel/argv_test")
    r_addr = 0x1048c
    # A matching concrete argv[1] reaches the target exactly once.
    path = proj.factory.path(args=['aaa', 'Yan is a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 1)
    # A non-matching concrete argv[1] never reaches it.
    path = proj.factory.path(args=['aaa', 'Yan is not a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 0)
    # Symbolic argv[1]: the solver must concretize it to the magic string.
    path = proj.factory.path(args=['aaa', claripy.BVS('arg_2', 50 * 8)], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    hit = explorer.found[0]
    stack = hit.state.memory.load(hit.state.registers.load('sp'), 400)
    conc = hit.state.se.any_str(stack)
    nose.tools.assert_equals("Yan is a noob" in conc, True)
def test_ppc32():
    """argv handling on PPC32: concrete match, concrete mismatch, symbolic arg."""
    proj = angr.Project(test_location + "/ppc/argv_test")
    r_addr = 0x10000498
    # A matching concrete argv[1] reaches the target exactly once.
    path = proj.factory.path(args=['aaa', 'Yan is a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 1)
    # A non-matching concrete argv[1] never reaches it.
    path = proj.factory.path(args=['aaa', 'Yan is not a noob'], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    nose.tools.assert_equals(len(explorer.found), 0)
    # Symbolic argv[1]: the solver must concretize it to the magic string.
    path = proj.factory.path(args=['aaa', claripy.BVS('arg_2', 50 * 8)], env={"HOME": "/home/angr"})
    explorer = proj.surveyors.Explorer(find=[r_addr], start=path)
    explorer.run()
    hit = explorer.found[0]
    stack = hit.state.memory.load(hit.state.registers.load('sp'), 400)
    conc = hit.state.se.any_str(stack)
    nose.tools.assert_equals("Yan is a noob" in conc, True)
if __name__ == "__main__":
    # Run the per-architecture argv tests directly (same order as before).
    for _t in (test_mips, test_mipsel, test_arm, test_i386, test_amd64, test_ppc32):
        _t()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,414
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/path.py
|
from os import urandom
import copy
import logging
import collections
l = logging.getLogger("angr.path")
import simuvex
import claripy
import mulpyplexer
#pylint:disable=unidiomatic-typecheck
UNAVAILABLE_RET_ADDR = -1
class CallFrame(object):
    """
    Stores the address of the function you're in and the value of SP
    at the VERY BOTTOM of the stack, i.e. points to the return address.
    """
    def __init__(self, state=None, func_addr=None, stack_ptr=None, ret_addr=None, jumpkind=None):
        """
        Initialize with either a state or the function address,
        stack pointer, and return address
        """
        # Prefer an explicit jumpkind; otherwise take it from the state's scratch.
        self.jumpkind = jumpkind if jumpkind is not None else (state.scratch.jumpkind if state is not None else None)
        if state is not None:
            try:
                self.func_addr = state.se.any_int(state.ip)
                self.stack_ptr = state.se.any_int(state.regs.sp)
            except (simuvex.SimUnsatError, simuvex.SimSolverModeError):
                # ip/sp could not be concretized; leave them unknown.
                self.func_addr = None
                self.stack_ptr = None
            if self.jumpkind and self.jumpkind.startswith('Ijk_Sys'):
                # syscalls: the return target is the saved post-syscall ip
                self.ret_addr = state.regs.ip_at_syscall
            else:
                # calls
                if state.arch.call_pushes_ret:
                    # The call pushed the return address; read it from the
                    # top of the stack. (Python 2 integer division on bits/8.)
                    self.ret_addr = state.memory.load(state.regs.sp, state.arch.bits / 8,
                                                      endness=state.arch.memory_endness, inspect=False
                                                      )
                else:
                    # Otherwise the return address lives in the link register.
                    self.ret_addr = state.regs.lr
            # Try to convert the ret_addr to an integer
            try:
                self.ret_addr = state.se.any_int(self.ret_addr)
            except (simuvex.SimUnsatError, simuvex.SimSolverModeError):
                self.ret_addr = None
        else:
            # No state: take the caller-supplied values verbatim.
            self.func_addr = func_addr
            self.stack_ptr = stack_ptr
            self.ret_addr = ret_addr
        # Per-block counter for this frame; populated by callers of this class
        # (presumably tracks how often each block runs — confirm at call sites).
        self.block_counter = collections.Counter()
    def __str__(self):
        return "Func %#x, sp=%#x, ret=%#x" % (self.func_addr, self.stack_ptr, self.ret_addr)
    def __repr__(self):
        return '<CallFrame (Func %#x)>' % (self.func_addr)
    def copy(self):
        # The Counter is duplicated so the copy does not share counts.
        c = CallFrame(state=None, func_addr=self.func_addr, stack_ptr=self.stack_ptr, ret_addr=self.ret_addr,
                      jumpkind=self.jumpkind
                      )
        c.block_counter = collections.Counter(self.block_counter)
        return c
class CallStack(object):
    """
    Represents a call stack: a LIFO sequence of :class:`CallFrame` objects.
    Supports iteration (top first), indexing from the top, equality, and hashing.
    """
    def __init__(self):
        # Frames are appended to the end; the end of the list is the top.
        self._callstack = []
    def __iter__(self):
        """
        Iterate through the callstack, from top to bottom
        (most recent first).
        """
        for cf in reversed(self._callstack):
            yield cf
    def push(self, cf):
        """
        Push the :class:`CallFrame` `cf` on the callstack.
        """
        self._callstack.append(cf)
    def pop(self):
        """
        Pops one :class:`CallFrame` from the callstack.
        :return: A CallFrame.
        :raises ValueError: If the callstack is empty.
        """
        try:
            return self._callstack.pop(-1)
        except IndexError:
            raise ValueError("Empty CallStack")
    @property
    def top(self):
        """
        Returns the element at the top of the callstack without removing it.
        :return: A CallFrame.
        :raises ValueError: If the callstack is empty.
        """
        try:
            return self._callstack[-1]
        except IndexError:
            raise ValueError("Empty CallStack")
    def __getitem__(self, k):
        """
        Returns the CallFrame at index k, indexing from the top of the stack.
        """
        k = -1 - k
        return self._callstack[k]
    def __len__(self):
        return len(self._callstack)
    def __repr__(self):
        return "<CallStack (depth %d)>" % len(self._callstack)
    def __str__(self):
        return "Backtrace:\n%s" % "\n".join(str(f) for f in self)
    def __eq__(self, other):
        # Two callstacks are equal when every frame matches on
        # (func_addr, stack_ptr, ret_addr) — consistent with __hash__.
        if not isinstance(other, CallStack):
            return False
        if len(self) != len(other):
            return False
        for c1, c2 in zip(self._callstack, other._callstack):
            if c1.func_addr != c2.func_addr or c1.stack_ptr != c2.stack_ptr or c1.ret_addr != c2.ret_addr:
                return False
        return True
    def __ne__(self, other):
        # BUG FIX: this used to be `return self != other`, which re-invoked
        # __ne__ itself and recursed until RecursionError. Negate __eq__ instead.
        return not self == other
    def __hash__(self):
        return hash(tuple((c.func_addr, c.stack_ptr, c.ret_addr) for c in self._callstack))
    def copy(self):
        """Return a deep-ish copy: frames are copied via CallFrame.copy()."""
        c = CallStack()
        c._callstack = [cf.copy() for cf in self._callstack]
        return c
class ReverseListProxy(list):
    """
    A list whose default iteration order is back-to-front.
    """
    def __iter__(self):
        # Walk indices from the last element down to 0.
        for position in range(len(self) - 1, -1, -1):
            yield self[position]
class Path(object):
    """
    A Path represents a sequence of basic blocks for an execution of the program.
    :ivar name: A string to identify the path.
    :ivar state: The state of the program.
    :type state: simuvex.SimState
    """
    def __init__(self, project, state, path=None):
        # :param project: The angr Project this path belongs to.
        # :param state:   The current SimState of the path.
        # :param path:    The predecessor Path, when extending an existing path;
        #                 None when starting a fresh path.
        # this is the state of the path
        self.state = state
        self.errored = False
        # project
        self._project = project
        if path is None:
            # Fresh path: new history, and a root callstack frame for the
            # current address with an unavailable return address.
            # the path history
            self.history = PathHistory()
            self.callstack = CallStack()
            # Note that stack pointer might be symbolic, and simply calling state.se.any_int over sp will fail in that
            # case. We should catch exceptions here.
            try:
                stack_ptr = self.state.se.any_int(self.state.regs.sp)
            except (simuvex.SimSolverModeError, simuvex.SimUnsatError):
                stack_ptr = None
            self.callstack.push(CallFrame(state=None, func_addr=self.addr,
                                          stack_ptr=stack_ptr,
                                          ret_addr=UNAVAILABLE_RET_ADDR
                                          )
                                )
            self.popped_callframe = None
            self.callstack_backtrace = []
            # the previous run
            self.previous_run = None
            self.history._jumpkind = state.scratch.jumpkind
            # A custom information store that will be passed to all its descendents
            self.info = {}
            # for merging
            self._upcoming_merge_points = []
        else:
            # Successor path: chain off the parent's history and copy its
            # callstack/bookkeeping, then record this state/run transition.
            # the path history
            self.history = PathHistory(path.history)
            self.callstack = path.callstack.copy()
            self.popped_callframe = path.popped_callframe
            self.callstack_backtrace = list(path.callstack_backtrace)
            # the previous run
            self.previous_run = path._run
            self.history._record_state(state)
            self.history._record_run(path._run)
            self._manage_callstack(state)
            # A custom information store that will be passed to all its descendents
            self.info = { k:copy.copy(v) for k, v in path.info.iteritems() }
            self._upcoming_merge_points = list(path._upcoming_merge_points)
        # for printing/ID stuff and inheritance
        self.name = str(id(self))
        self.path_id = urandom(8).encode('hex')
        # actual analysis stuff
        self._run_args = None       # sim_run args, to determine caching
        self._run = None
        self._run_error = None
    @property
    def addr(self):
        # Concrete value of the current instruction pointer.
        return self.state.se.any_int(self.state.regs.ip)
    @addr.setter
    def addr(self, val):
        self.state.regs.ip = val
    #
    # Pass-throughs to history
    #
    @property
    def length(self):
        return self.history.length
    @length.setter
    def length(self, v):
        l.warning("Manually setting length -- change this behavior.")
        self.history.length = v
    @property
    def extra_length(self):
        return self.history.extra_length
    @extra_length.setter
    def extra_length(self, val):
        self.history.extra_length = val
    @property
    def weighted_length(self):
        return self.history.length + self.history.extra_length
    @property
    def jumpkind(self):
        return self.history._jumpkind
    @property
    def last_actions(self):
        return self.history.actions
    #
    # History traversal
    #
    @property
    def history_iterator(self):
        return HistoryIter(self.history)
    @property
    def addr_trace(self):
        return AddrIter(self.history)
    @property
    def trace(self):
        return RunstrIter(self.history)
    @property
    def targets(self):
        return TargetIter(self.history)
    @property
    def guards(self):
        return GuardIter(self.history)
    @property
    def jumpkinds(self):
        return JumpkindIter(self.history)
    @property
    def events(self):
        return EventIter(self.history)
    @property
    def actions(self):
        return ActionIter(self.history)
    def trim_history(self):
        # Detach this path's history node from its ancestors so the older
        # nodes can be garbage-collected.
        self.history = self.history.copy()
        self.history._parent = None
    #
    # Stepping methods and successor access
    #
    def step(self, throw=None, **run_args):
        """
        Step a path forward. Optionally takes any argument applicable to project.factory.sim_run.
        :param jumpkind: the jumpkind of the previous exit.
        :param addr: an address to execute at instead of the state's ip.
        :param stmt_whitelist: a list of stmt indexes to which to confine execution.
        :param last_stmt: a statement index at which to stop execution.
        :param thumb: whether the block should be lifted in ARM's THUMB mode.
        :param backup_state: a state to read bytes from instead of using project memory.
        :param opt_level: the VEX optimization level to use.
        :param insn_bytes: a string of bytes to use for the block instead of the project.
        :param max_size: the maximum size of the block, in bytes.
        :param num_inst: the maximum number of instructions.
        :param traceflags: traceflags to be passed to VEX. Default: 0
        :returns: An array of paths for the possible successors.
        """
        # Re-run only when the arguments changed or no cached run exists.
        if self._run_args != run_args or not self._run:
            self._run_args = run_args
            self._make_sim_run(throw=throw)
        self.state._inspect('path_step', simuvex.BP_BEFORE)
        if self._run_error:
            return [ self.copy(error=self._run_error) ]
        out = [ Path(self._project, s, path=self) for s in self._run.flat_successors ]
        # When executing injected bytes at the current address, keep the
        # single fall-through successor pinned at this address.
        if 'insn_bytes' in run_args and 'addr' not in run_args and len(out) == 1 \
                and isinstance(self._run, simuvex.SimIRSB) \
                and self.addr + self._run.irsb.size == out[0].state.se.any_int(out[0].state.regs.ip):
            out[0].state.regs.ip = self.addr
        for p in out:
            p.state._inspect('path_step', simuvex.BP_AFTER)
        return out
    def clear(self):
        """
        Clear the cached execution status.
        After calling this, :func:`step` will recompute successors. If you changed something in the path's
        state you probably want to call this method.
        """
        self._run = None
    def _make_sim_run(self, throw=None):
        # Execute the current state, caching either the resulting SimRun or
        # the exception it raised (re-raised when `throw` is truthy).
        self._run = None
        self._run_error = None
        try:
            self._run = self._project.factory.sim_run(self.state, **self._run_args)
        except (AngrError, simuvex.SimError, claripy.ClaripyError) as e:
            l.debug("Catching exception", exc_info=True)
            self._run_error = e
            if throw:
                raise
        except (TypeError, ValueError, ArithmeticError, MemoryError) as e:
            # Also tolerate common non-angr errors from the execution engine.
            l.debug("Catching exception", exc_info=True)
            self._run_error = e
            if throw:
                raise
    @property
    def next_run(self):
        # The cached SimRun produced by the last step(), or None on error.
        if self._run_error:
            return None
        if not self._run:
            raise AngrPathError("Please call path.step() before accessing next_run")
        return self._run
    @property
    def successors(self):
        # NOTE(review): this re-invokes step() with the cached run args; the
        # guard only ensures step() was called at least once before.
        if not (self._run_error or self._run):
            raise AngrPathError("Please call path.step() before accessing successors")
        return self.step(**self._run_args)
    @property
    def unconstrained_successors(self):
        # Successors whose instruction pointer is fully symbolic.
        if self._run_error:
            return []
        if not self._run:
            raise AngrPathError("Please call path.step() before accessing successors")
        return [ Path(self._project, s, path=self) for s in self._run.unconstrained_successors ]
    @property
    def unsat_successors(self):
        # Successors whose constraints are unsatisfiable.
        if self._run_error:
            return []
        if not self._run:
            raise AngrPathError("Please call path.step() before accessing successors")
        return [ Path(self._project, s, path=self) for s in self._run.unsat_successors ]
    @property
    def mp_successors(self):
        # Successors wrapped for mulpyplexer-style broadcast access.
        return mulpyplexer.MP(self.successors)
    @property
    def nonflat_successors(self):
        # Both regular and unconstrained successors, without flattening.
        if self._run_error:
            return []
        if not self._run:
            raise AngrPathError("Please call path.step() before accessing successors")
        nonflat_successors = [ ]
        for s in self._run.successors + self._run.unconstrained_successors:
            sp = Path(self._project, s, path=self)
            nonflat_successors.append(sp)
        return nonflat_successors
    @property
    def unconstrained_successor_states(self):
        # Raw unconstrained successor states (not wrapped in Path objects).
        if self._run_error:
            return []
        if not self._run:
            raise AngrPathError("Please call path.step() before accessing successors")
        return self._run.unconstrained_successors
    #
    # Utility functions
    #
    def branch_causes(self):
        """
        Returns the variables that have caused this path to branch.
        :return: A list of tuples of (basic block address, jmp instruction address, set(variables))
        """
        return [
            (h.addr, h._jump_source, tuple(h._guard.variables)) for h in self.history_iterator
            if h._jump_avoidable
        ]
    def divergence_addr(self, other):
        """
        Returns the basic block at which the paths diverged.
        :param other: The other Path.
        :returns: The address of the basic block.
        """
        # NOTE(review): the bounds checks use '>' where '>=' looks intended;
        # when i == len(traceN), indexing traceN[i] below would raise
        # IndexError before the '>' guard fires on the next iteration — confirm.
        trace1 = self.addr_trace.hardcopy
        trace2 = other.addr_trace.hardcopy
        for i in range(max([len(trace1), len(trace2)])):
            if i > len(trace1):
                return trace2[i-1]
            elif i > len(trace2):
                return trace1[i-1]
            elif trace1[i] != trace2[i]:
                return trace1[i-1]
    def detect_loops(self, n=None): #pylint:disable=unused-argument
        """
        Returns the current loop iteration that a path is on.
        :param n: The minimum number of iterations to check for.
        :returns: The number of the loop iteration it's in.
        """
        # TODO: make this work better
        #addr_strs = [ "%x"%x for x in self.addr_trace ]
        #bigstr = "".join(addr_strs)
        #candidates = [ ]
        #max_iteration_length = len(self.addr_trace) / n
        #for i in range(max_iteration_length):
        #    candidates.append("".join(addr_strs[-i-0:]))
        #for c in reversed(candidates):
        #    if bigstr.count(c) >= n:
        #        return n
        #return None
        # Current heuristic: the hit count of the most-executed block in the
        # top call frame approximates the loop iteration count.
        mc = self.callstack.top.block_counter.most_common()
        if len(mc) == 0:
            return None
        else:
            return mc[0][1]
    #
    # Error checking
    #
    # Jumpkinds that indicate a decoding/translation error or a signal.
    _jk_errors = set(("Ijk_EmFail", "Ijk_NoDecode", "Ijk_MapFail"))
    _jk_signals = set(('Ijk_SigILL', 'Ijk_SigTRAP', 'Ijk_SigSEGV', 'Ijk_SigBUS',
                       'Ijk_SigFPE_IntDiv', 'Ijk_SigFPE_IntOvf'))
    _jk_all_bad = _jk_errors | _jk_signals
    #
    # Convenience functions
    #
    @property
    def reachable(self):
        return self.history.reachable()
    # Shortcuts to the first four successors of a step(), for interactive use.
    @property
    def _s0(self):
        return self.step()[0]
    @property
    def _s1(self):
        return self.step()[1]
    @property
    def _s2(self):
        return self.step()[2]
    @property
    def _s3(self):
        return self.step()[3]
    #
    # State continuation
    #
    def _manage_callstack(self, state):
        """
        Adds the information from the last run to the current path.
        """
        # maintain the blockcounter stack
        if state.scratch.bbl_addr_list is not None:
            # there are more than one block - probably from Unicorn engine
            block_addr_to_jumpkind = { } # cache
            for i, bbl_addr in enumerate(state.scratch.bbl_addr_list):
                try:
                    block_size, jumpkind = block_addr_to_jumpkind[bbl_addr]
                except KeyError:
                    if self._project.is_hooked(bbl_addr):
                        if issubclass(self._project.hooked_by(bbl_addr), simuvex.SimProcedure):
                            block_size = None # it will not be used
                            jumpkind = 'Ijk_Ret'
                        else:
                            block_size = None # will not be used either
                            jumpkind = 'Ijk_Boring'
                    else:
                        block = self._project.factory.block(bbl_addr)
                        block_size = block.size
                        jumpkind = block.vex.jumpkind
                    block_addr_to_jumpkind[bbl_addr] = block_size, jumpkind
                if jumpkind == 'Ijk_Call':
                    if i == len(state.scratch.bbl_addr_list) - 1:
                        # Last block: the callee's state is `state` itself.
                        self._manage_callstack_call(state)
                    else:
                        # Interior block: reconstruct the frame from the next
                        # block's address and stack pointer.
                        func_addr = state.scratch.bbl_addr_list[i + 1]
                        stack_ptr = state.scratch.stack_pointer_list[i + 1]
                        ret_addr = bbl_addr + block_size
                        self._manage_callstack_call(func_addr=func_addr, stack_ptr=stack_ptr, ret_addr=ret_addr)
                elif jumpkind.startswith('Ijk_Sys'):
                    if i == len(state.scratch.bbl_addr_list) - 1:
                        self._manage_callstack_sys(state)
                    else:
                        func_addr = state.scratch.bbl_addr_list[i + 1]
                        stack_ptr = state.scratch.stack_pointer_list[i + 1]
                        ret_addr = bbl_addr + block_size
                        self._manage_callstack_sys(func_addr=func_addr, stack_ptr=stack_ptr, ret_addr=ret_addr,
                                                   jumpkind=jumpkind
                                                   )
                elif jumpkind == 'Ijk_Ret':
                    self._manage_callstack_ret()
        else:
            # there is only one block
            if state.scratch.jumpkind == "Ijk_Call":
                self._manage_callstack_call(state)
            elif state.scratch.jumpkind.startswith('Ijk_Sys'):
                self._manage_callstack_sys(state)
            elif state.scratch.jumpkind == "Ijk_Ret":
                self._manage_callstack_ret()
        self.callstack.top.block_counter[state.scratch.bbl_addr] += 1
    def _manage_callstack_call(self, state=None, func_addr=None, stack_ptr=None, ret_addr=None):
        # Push a new frame for a call, and record it in the backtrace.
        if state is not None:
            callframe = CallFrame(state)
        else:
            callframe = CallFrame(func_addr=func_addr, stack_ptr=stack_ptr, ret_addr=ret_addr, jumpkind='Ijk_Call')
        self.callstack.push(callframe)
        self.callstack_backtrace.append((hash(self.callstack), callframe, len(self.callstack)))
    def _manage_callstack_sys(self, state=None, func_addr=None, stack_ptr=None, ret_addr=None, jumpkind=None):
        # Push a new frame for a syscall, and record it in the backtrace.
        if state is not None:
            callframe = CallFrame(state)
        else:
            callframe = CallFrame(func_addr=func_addr, stack_ptr=stack_ptr, ret_addr=ret_addr, jumpkind=jumpkind)
        self.callstack.push(callframe)
        self.callstack_backtrace.append((hash(self.callstack), callframe, len(self.callstack)))
    def _manage_callstack_ret(self):
        # Pop the returning frame; if that empties the stack, re-seed it with
        # a dummy frame so `callstack.top` stays usable.
        self.popped_callframe = self.callstack.pop()
        if len(self.callstack) == 0:
            l.info("Path callstack unbalanced...")
            self.callstack.push(CallFrame(state=None, func_addr=0, stack_ptr=0, ret_addr=0))
    #
    # Merging and splitting
    #
    def merge(*all_paths, **kwargs): #pylint:disable=no-self-argument,no-method-argument
        """
        Returns a merger of this path with `*others`.
        :param paths: the paths to merge
        :param common_history: a PathHistory node shared by all the paths. When this is provided, the
                               merging becomes more efficient, and actions and such are merged.
        :returns: the merged Path
        :rtype: Path
        """
        # NOTE(review): pop() without a default raises KeyError when
        # common_history is not passed, yet the None check below suggests the
        # kwarg was meant to be optional — confirm intended contract.
        common_history = kwargs.pop('common_history')
        if len(kwargs) != 0:
            raise ValueError("invalid arguments: %s" % kwargs.keys())
        if len(set(( o.addr for o in all_paths))) != 1:
            raise AngrPathError("Unable to merge paths.")
        if common_history is None:
            raise AngrPathError("TODO: implement mergining without a provided common history")
        # get the different constraints
        constraints = [ p.history.constraints_since(common_history) for p in all_paths ]
        # merge the state with these constraints
        new_state, merge_conditions, _ = all_paths[0].state.merge(
            *[ p.state for p in all_paths[1:] ], merge_conditions=constraints
        )
        new_path = Path(all_paths[0]._project, new_state, path=all_paths[0])
        # fix up the new path
        new_path.history = PathHistory(common_history)
        new_path.history.merged_from.extend(p.history for p in all_paths)
        new_path.history.merge_conditions = merge_conditions
        new_path.history._record_state(new_state)
        new_path.history._runstr = "MERGE POINT (at %#x)" % new_path.addr
        new_path.history.length -= 1
        # reset the upcoming merge points
        new_path._upcoming_merge_points = []
        # and return
        return new_path
    def copy(self, error=None):
        # Duplicate this path; passing an error produces an ErroredPath.
        if error is None:
            p = Path(self._project, self.state.copy())
        else:
            p = ErroredPath(error, self._project, self.state.copy())
        p.history = self.history.copy()
        p.callstack = self.callstack.copy()
        p.callstack_backtrace = list(self.callstack_backtrace)
        p.popped_callframe = self.popped_callframe
        p.previous_run = self.previous_run
        p._run = self._run
        p.info = {k: copy.copy(v) for k, v in self.info.iteritems()}
        p._upcoming_merge_points = list(self._upcoming_merge_points)
        return p
    def filter_actions(self, block_addr=None, block_stmt=None, insn_addr=None, read_from=None, write_to=None):
        """
        Filter self.actions based on some common parameters.
        :param block_addr: Only return actions generated in blocks starting at this address.
        :param block_stmt: Only return actions generated in the nth statement of each block.
        :param insn_addr: Only return actions generated in the assembly instruction at this address.
        :param read_from: Only return actions that perform a read from the specified location.
        :param write_to: Only return actions that perform a write to the specified location.
        Notes:
        If IR optimization is turned on, reads and writes may not occur in the instruction
        they originally came from. Most commonly, If a register is read from twice in the same
        block, the second read will not happen, instead reusing the temp the value is already
        stored in.
        Valid values for read_from and write_to are the string literals 'reg' or 'mem' (matching
        any read or write to registers or memory, respectively), any string (representing a read
        or write to the named register), and any integer (representing a read or write to the
        memory at this address).
        """
        if read_from is not None:
            if write_to is not None:
                raise ValueError("Can't handle read_from and write_to at the same time!")
            if read_from in ('reg', 'mem'):
                read_type = read_from
                read_offset = None
            elif isinstance(read_from, str):
                # Named register: translate to its architectural offset.
                read_type = 'reg'
                read_offset = self._project.arch.registers[read_from][0]
            else:
                read_type = 'mem'
                read_offset = read_from
        if write_to is not None:
            if write_to in ('reg', 'mem'):
                write_type = write_to
                write_offset = None
            elif isinstance(write_to, str):
                # Named register: translate to its architectural offset.
                write_type = 'reg'
                write_offset = self._project.arch.registers[write_to][0]
            else:
                write_type = 'mem'
                write_offset = write_to
        def addr_of_stmt(bbl_addr, stmt_idx):
            # Map a (block, statement index) pair to the address of the
            # instruction containing that statement, via the nearest IMark.
            if stmt_idx is None:
                return None
            stmts = self._project.factory.block(bbl_addr).vex.statements
            if stmt_idx >= len(stmts):
                return None
            for i in reversed(xrange(stmt_idx + 1)):
                if stmts[i].tag == 'Ist_IMark':
                    return stmts[i].addr + stmts[i].delta
            return None
        def action_reads(action):
            # True if the action is a read matching the requested type/offset.
            if action.type != read_type:
                return False
            if action.action != 'read':
                return False
            if read_offset is None:
                return True
            addr = action.addr
            if isinstance(addr, simuvex.SimActionObject):
                addr = addr.ast
            if isinstance(addr, claripy.ast.Base):
                if addr.symbolic:
                    return False
                addr = self.state.se.any_int(addr)
            if addr != read_offset:
                return False
            return True
        def action_writes(action):
            # True if the action is a write matching the requested type/offset.
            if action.type != write_type:
                return False
            if action.action != 'write':
                return False
            if write_offset is None:
                return True
            addr = action.addr
            if isinstance(addr, simuvex.SimActionObject):
                addr = addr.ast
            if isinstance(addr, claripy.ast.Base):
                if addr.symbolic:
                    return False
                addr = self.state.se.any_int(addr)
            if addr != write_offset:
                return False
            return True
        return [x for x in reversed(self.actions) if
                    (block_addr is None or x.bbl_addr == block_addr) and
                    (block_stmt is None or x.stmt_idx == block_stmt) and
                    (read_from is None or action_reads(x)) and
                    (write_to is None or action_writes(x)) and
                    (insn_addr is None or (x.sim_procedure is None and addr_of_stmt(x.bbl_addr, x.stmt_idx) == insn_addr))
                ]
    def __repr__(self):
        return "<Path with %d runs (at 0x%x)>" % (self.length, self.addr)
class ErroredPath(Path):
    """
    ErroredPath is used for paths that have encountered an error in their symbolic execution. This kind of path
    cannot be stepped further.
    :ivar error: The error that was encountered.
    """
    def __init__(self, error, *args, **kwargs):
        super(ErroredPath, self).__init__(*args, **kwargs)
        self.errored = True
        self.error = error
    def __repr__(self):
        template = "<Errored Path with %d runs (at 0x%x, %s)>"
        return template % (self.length, self.addr, type(self.error).__name__)
    def step(self, *args, **kwargs):
        # pylint: disable=unused-argument
        # Stepping an errored path is never valid; fail loudly.
        raise AngrPathError("Cannot step forward an errored path")
    def retry(self, **kwargs):
        """
        Force re-execution of the failed run, then step via the normal Path machinery.
        """
        self._run_args = kwargs
        self._run = self._project.factory.sim_run(self.state, **self._run_args)
        return super(ErroredPath, self).step(**kwargs)
def make_path(project, runs):
    """
    A helper function to generate a correct angr.Path from a list of runs corresponding to a program path.
    :param project: The angr Project the runs belong to.
    :param runs: A list of SimRuns corresponding to a program path.
    :raises AngrPathError: If `runs` is empty.
    """
    if not runs:
        raise AngrPathError("Cannot generate Path from empty set of runs")
    # Seed the path with the initial state of the first run...
    result_path = Path(project, runs[0].initial_state)
    # ...and record that run in a fresh history node.
    result_path.history = PathHistory(result_path.history)
    result_path.history._record_run(runs[0])
    # Replay every intermediate run: record its entry state, keep the
    # callstack in sync, then open a new history node for the run itself.
    for sim_run in runs[1:-1]:
        result_path.history._record_state(sim_run.initial_state)
        result_path._manage_callstack(sim_run.initial_state)
        result_path.history = PathHistory(result_path.history)
        result_path.history._record_run(sim_run)
    # The last run's initial state becomes the path's current state.
    final_state = runs[-1].initial_state
    result_path.history._record_state(final_state)
    result_path._manage_callstack(final_state)
    result_path.state = final_state
    return result_path
from .errors import AngrError, AngrPathError
from .path_history import * #pylint:disable=wildcard-import,unused-wildcard-import
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,415
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/simos.py
|
"""
Manage OS-level configuration.
"""
import logging
from archinfo import ArchARM, ArchMIPS32, ArchMIPS64, ArchX86, ArchAMD64, ArchPPC32, ArchPPC64, ArchAArch64
from simuvex import SimState, SimIRSB, SimStateSystem, SimActionData
from simuvex import s_options as o, s_cc
from simuvex import SimProcedures
from simuvex.s_procedure import SimProcedure, SimProcedureContinuation
from cle import MetaELF, BackedCGC
import pyvex
import claripy
from .errors import AngrSyscallError, AngrUnsupportedSyscallError, AngrCallableError, AngrSimOSError
from .tablespecs import StringTableSpec
l = logging.getLogger("angr.simos")
class IRange(object):
    """
    A lightweight, picklable half-open integer interval [start, end).
    """
    __slots__ = ('start', 'end')
    def __init__(self, start, end):
        self.start = start
        self.end = end
    def __contains__(self, k):
        # Only plain integer types can be members; everything else is rejected.
        if type(k) not in (int, long):
            return False
        return self.start <= k < self.end
    def __getstate__(self):
        # Pickle support (required because of __slots__).
        return self.start, self.end
    def __setstate__(self, state):
        self.start, self.end = state
class SyscallEntry(object):
    """
    Describes a syscall.
    :ivar str name: Name of the syscall.
    :ivar int pseudo_addr: The pseudo address assigned to this syscall.
    :ivar simproc: The SimProcedure class for handling this syscall.
    :ivar bool supported: True if this syscall is defined and has a SimProcedure implemented, False otherwise.
    """
    def __init__(self, name, pseudo_addr, simproc, supported=True):
        """
        :param str name: Syscall name.
        :param int pseudo_addr: The pseudo address assigned to this syscall.
        :param simproc: The SimProcedure for handling this syscall.
        :param bool supported: True if this syscall is defined and there is a SimProcedure implemented for it.
        """
        self.name = name
        self.pseudo_addr = pseudo_addr
        self.simproc = simproc
        self.supported = supported
    def __repr__(self):
        # Flag unsupported syscalls so they stand out when printed.
        suffix = "" if self.supported else ", unsupported"
        return "<Syscall %s @ %#x%s>" % (self.name, self.pseudo_addr, suffix)
class SyscallTable(object):
    """
    Represents a syscall table.
    :ivar int max_syscall_number: The maximum syscall number of all supported syscalls in the platform.
    :ivar int unknown_syscall_number: The syscall number of the "unknown" syscall used for unsupported syscalls.
    """
    def __init__(self, max_syscall_number=None):
        """
        Constructor.
        :param int or None max_syscall_number: The maximum syscall number of all supported syscalls in the platform.
        """
        self.max_syscall_number = max_syscall_number
        self.unknown_syscall_number = None
        self._table = { }
    def __setitem__(self, syscall_number, syscall):
        """
        Insert a syscall entry to the table, keeping max_syscall_number up to date.
        :param int syscall_number: Number of the syscall.
        :param SyscallEntry syscall: The syscall to insert.
        :return: None
        """
        # BUG FIX: the original compared an int against a possibly-None
        # max_syscall_number, which only "worked" via Python 2's arbitrary
        # cross-type ordering and raises TypeError on Python 3. Handle the
        # unset case explicitly; behavior is unchanged otherwise.
        if self.max_syscall_number is None or syscall_number > self.max_syscall_number:
            self.max_syscall_number = syscall_number
        self._table[syscall_number] = syscall
    def __getitem__(self, syscall_number):
        """
        Get a syscall entry from the table.
        :param int syscall_number: Number of the syscall.
        :return: The syscall entry.
        :rtype: SyscallEntry
        :raises KeyError: If the syscall number is not in the table.
        """
        if syscall_number in self._table:
            return self._table[syscall_number]
        raise KeyError('Syscall number %d not found in syscall table.' % syscall_number)
    def __len__(self):
        """
        Get the number of all syscalls supported by this syscall table.
        :return: The number of all syscalls supported.
        :rtype: int
        """
        return len(self._table)
    def __contains__(self, syscall_number):
        """
        Check if the syscall number is defined in this syscall table.
        :param int syscall_number: The syscall number to check.
        :return: True if the syscall is defined in this table, False otherwise.
        :rtype: bool
        """
        return syscall_number in self._table
    @property
    def max_syscall(self):
        """
        Get the maximum syscall number, or None if the syscall table is empty and `max_syscall_number` is not set.
        :return: The syscall number.
        :rtype: int or None
        """
        return self.max_syscall_number
    @property
    def unknown_syscall(self):
        """
        Get the "unknown" syscall entry.
        :return: The syscall entry for unknown syscalls.
        :rtype: SyscallEntry
        :raises AngrSyscallError: If the unknown syscall number has not been set.
        """
        if self.unknown_syscall_number is None:
            raise AngrSyscallError('The unknown syscall number of this syscall table is not set.')
        return self[self.unknown_syscall_number]
    def supports(self, syscall_number):
        """
        Check if the syscall number is defined and supported.
        :param int syscall_number: The number of syscall to check.
        :return: True if the syscall number is defined and supported by angr, False otherwise
        :rtype: bool
        """
        if syscall_number not in self._table:
            return False
        return self._table[syscall_number].supported
class SimOS(object):
    """
    A class describing OS/arch-level configuration.

    Instances are attached to a project and are responsible for building the
    syscall table, hooking loader-level pseudo-addresses, and constructing
    initial SimStates (blank / entry / call states).
    """
    def __init__(self, project, name=None):
        # :param project: the angr Project this OS model configures.
        # :param name:    human-readable OS name (e.g. "Linux", "CGC").
        self.arch = project.arch
        self.proj = project
        self.name = name
        # Pseudo-addresses for the simprocedure continuation hook and the
        # call-return dead-end hook; both are assigned in configure_project().
        self.continue_addr = None
        self.return_deadend = None
        # Maps syscall number -> SyscallEntry; populated by _load_syscalls().
        self.syscall_table = SyscallTable()
    def _load_syscalls(self, syscall_table, syscall_lib):
        """
        Load a table of syscalls to self.proj._syscall_obj. Each syscall entry takes 8 bytes no matter what
        architecture it is on.
        :param dict syscall_table: Syscall table.
        :param str syscall_lib: Name of the syscall library
        :return: None
        """
        base_addr = self.proj._syscall_obj.rebase_addr
        # The table may be sparse; size it by the largest syscall number present.
        syscall_entry_count = 0 if not syscall_table else max(syscall_table.keys()) + 1
        for syscall_number in xrange(syscall_entry_count):
            # Each syscall occupies a fixed 8-byte slot in the syscall object.
            syscall_addr = base_addr + syscall_number * 8
            if syscall_number in syscall_table:
                name, simproc_name = syscall_table[syscall_number]
                if simproc_name in SimProcedures[syscall_lib]:
                    simproc = SimProcedures[syscall_lib][simproc_name]
                else:
                    # no SimProcedure is implemented for this syscall
                    simproc = SimProcedures["syscalls"]["stub"]
                self.syscall_table[syscall_number] = SyscallEntry(name, syscall_addr, simproc)
                # Write it to the SimProcedure dict
                self.proj._sim_procedures[syscall_addr] = (simproc, { })
            else:
                # no syscall number available in the pre-defined syscall table
                self.syscall_table[syscall_number] = SyscallEntry("_unsupported", syscall_addr,
                                                                  SimProcedures["syscalls"]["stub"],
                                                                  supported=False
                                                                  )
                # Write it to the SimProcedure dict
                self.proj._sim_procedures[syscall_addr] = (SimProcedures["syscalls"]["stub"], { })
        # Now here is the fallback syscall stub
        # NOTE(review): the fallback lives at slot `syscall_entry_count + 1`,
        # leaving slot `syscall_entry_count` unused - presumably intentional
        # padding, but verify.
        unknown_syscall_addr = base_addr + (syscall_entry_count + 1) * 8
        unknown_syscall_number = syscall_entry_count + 1
        self.syscall_table.unknown_syscall_number = unknown_syscall_number
        self.syscall_table[unknown_syscall_number] = SyscallEntry("_unknown", unknown_syscall_addr,
                                                                  SimProcedures["syscalls"]["stub"],
                                                                  supported=False
                                                                  )
        self.proj._sim_procedures[unknown_syscall_addr] = (SimProcedures["syscalls"]["stub"], { })
    def syscall_info(self, state):
        """
        Get information about the syscall that is about to be called. Note that symbolic syscalls are not supported -
        the syscall number *must* have only one solution.
        :param simuvex.s_state.SimState state: the program state.
        :return: A tuple of (cc, syscall_addr, syscall_name, syscall_class)
        :rtype: tuple
        """
        # Pick the syscall calling convention for this OS/arch combination.
        if state.os_name in s_cc.SyscallCC[state.arch.name]:
            cc = s_cc.SyscallCC[state.arch.name][state.os_name](state.arch)
        else:
            # Use the default syscall calling convention - it may bring problems
            cc = s_cc.SyscallCC[state.arch.name]['default'](state.arch)
        syscall_num = cc.syscall_num(state)
        # Ask for up to two solutions so a symbolic syscall number is detectable.
        possible = state.se.any_n_int(syscall_num, 2)
        if len(possible) > 1 and len(self.syscall_table) > 0:
            # Symbolic syscalls are not supported - we will create a 'unknown syscall" stub for it
            n = self.syscall_table.unknown_syscall_number
        elif not possible:
            # The state is not satisfiable
            raise AngrUnsupportedSyscallError("The program state is not satisfiable")
        else:
            n = possible[0]
        if not self.syscall_table.supports(n):
            if o.BYPASS_UNSUPPORTED_SYSCALL in state.options:
                state.log.add_event('resilience', resilience_type='syscall', syscall=n, message='unsupported syscall')
                syscall = self.syscall_table.unknown_syscall if n not in self.syscall_table else self.syscall_table[n]
            else:
                l.error("Syscall %d is not found for arch %s", n, state.arch.name)
                raise AngrUnsupportedSyscallError("Syscall %d is not found for arch %s" % (n, state.arch.name))
        else:
            syscall = self.syscall_table[n]
        return cc, syscall.pseudo_addr, syscall.name, syscall.simproc
    def handle_syscall(self, state):
        """
        Handle a state whose immediate preceding jumpkind is syscall by creating a new SimRun. Note that symbolic
        syscalls are not supported - the syscall number *must* have only one solution.
        :param simuvex.s_state.SimState state: the program state.
        :return: a new SimRun instance.
        :rtype: simuvex.s_procedure.SimProcedure
        """
        cc, syscall_addr, syscall_name, syscall_class = self.syscall_info(state)
        # The ip_at_syscall register is misused to save the return address for this syscall
        ret_to = state.regs.ip_at_syscall
        state.ip = syscall_addr
        syscall = syscall_class(state, addr=syscall_addr, ret_to=ret_to, convention=cc, syscall_name=syscall_name)
        return syscall
    def configure_project(self):
        """
        Configure the project to set up global settings (like SimProcedures).
        """
        # Hook the pseudo-addresses used to resume paused simprocedures and to
        # catch returns from factory.call_state-created paths.
        self.continue_addr = self.proj._extern_obj.get_pseudo_addr('angr##simproc_continue')
        self.proj.hook(self.continue_addr, SimProcedureContinuation)
        self.return_deadend = self.proj._extern_obj.get_pseudo_addr('angr##return_deadend')
        self.proj.hook(self.return_deadend, CallReturn)
        def irelative_resolver(resolver_addr):
            # Concretely execute the IRELATIVE resolver to get the final address.
            resolver = self.proj.factory.callable(resolver_addr, concrete_only=True)
            try:
                val = resolver()
            except AngrCallableError:
                l.error("Resolver at %#x failed to resolve!", resolver_addr)
                return None
            if not val.singlevalued:
                l.error("Resolver at %#x failed to resolve! (multivalued)", resolver_addr)
                return None
            return val._model_concrete.value
        self.proj.loader.perform_irelative_relocs(irelative_resolver)
    def state_blank(self, addr=None, initial_prefix=None, stack_size=1024*1024*8, **kwargs):
        """
        Initialize a blank state.
        All parameters are optional.
        :param addr: The execution start address.
        :param initial_prefix: Prefix for the names of symbolic registers, if any.
        :param stack_size: Size in bytes of the pre-approved stack region.
        :return: The initialized SimState.
        :rtype: simuvex.SimState
        """
        if kwargs.get('mode', None) is None:
            kwargs['mode'] = self.proj._default_analysis_mode
        if kwargs.get('permissions_backer', None) is None:
            # just a dict of address ranges to permission bits
            permission_map = { }
            for obj in self.proj.loader.all_objects:
                for seg in obj.segments:
                    perms = 0
                    # bit values based off of protection bit values from sys/mman.h
                    if seg.is_readable:
                        perms |= 1 # PROT_READ
                    if seg.is_writable:
                        perms |= 2 # PROT_WRITE
                    if seg.is_executable:
                        perms |= 4 # PROT_EXEC
                    permission_map[(obj.rebase_addr + seg.min_addr, obj.rebase_addr + seg.max_addr)] = perms
            permissions_backer = (self.proj.loader.main_bin.execstack, permission_map)
            kwargs['permissions_backer'] = permissions_backer
        if kwargs.get('memory_backer', None) is None:
            kwargs['memory_backer'] = self.proj.loader.memory
        if kwargs.get('arch', None) is None:
            kwargs['arch'] = self.proj.arch
        if kwargs.get('os_name', None) is None:
            kwargs['os_name'] = self.name
        state = SimState(**kwargs)
        stack_end = state.arch.initial_sp
        if o.ABSTRACT_MEMORY not in state.options:
            # Pre-approve the stack range so stack accesses are not flagged.
            state.memory.mem._preapproved_stack = IRange(stack_end - stack_size, stack_end)
        if o.INITIALIZE_ZERO_REGISTERS in state.options:
            highest_reg_offset, reg_size = max(state.arch.registers.values())
            for i in range(0, highest_reg_offset + reg_size, state.arch.bytes):
                state.registers.store(i, state.se.BVV(0, state.arch.bits))
        state.regs.sp = stack_end
        if initial_prefix is not None:
            # Replace default symbolic registers with explicitly-named symbols.
            for reg in state.arch.default_symbolic_registers:
                state.registers.store(reg, claripy.BVS(initial_prefix + "_" + reg,
                                                        state.arch.bits,
                                                        explicit_name=True))
        for reg, val, is_addr, mem_region in state.arch.default_register_values:
            region_base = None  # so pycharm does not complain
            if is_addr:
                if isinstance(mem_region, tuple):
                    # unpack it
                    mem_region, region_base = mem_region
                elif mem_region == 'global':
                    # Backward compatibility
                    region_base = 0
                else:
                    raise AngrSimOSError('You must specify the base address for memory region "%s". ' % mem_region)
            if o.ABSTRACT_MEMORY in state.options and is_addr:
                address = claripy.ValueSet(state.arch.bits, mem_region, region_base, val)
                state.registers.store(reg, address)
            else:
                state.registers.store(reg, val)
        if addr is None: addr = self.proj.entry
        state.regs.ip = addr
        # Seed the scratch plugin so the first block executes cleanly.
        state.scratch.ins_addr = addr
        state.scratch.bbl_addr = addr
        state.scratch.stmt_idx = 0
        state.scratch.jumpkind = 'Ijk_Boring'
        state.procedure_data.hook_addr = self.continue_addr
        return state
    def state_entry(self, **kwargs):
        """Return a state positioned at the program entry point."""
        return self.state_blank(**kwargs)
    def state_full_init(self, **kwargs):
        """Return a state that will run loader initializers before main entry."""
        return self.state_entry(**kwargs)
    def state_call(self, addr, *args, **kwargs):
        """
        Return a state set up to call `addr` with the given positional `args`,
        using the arch's default calling convention unless `cc` is supplied.
        Recognized kwargs: cc, base_state, toc, ret_addr, stack_base,
        alloc_base, grow_like_stack; the rest go to state_blank().
        """
        cc = kwargs.pop('cc', s_cc.DefaultCC[self.arch.name](self.proj.arch))
        state = kwargs.pop('base_state', None)
        toc = kwargs.pop('toc', None)
        ret_addr = kwargs.pop('ret_addr', self.return_deadend)
        stack_base = kwargs.pop('stack_base', None)
        alloc_base = kwargs.pop('alloc_base', None)
        grow_like_stack = kwargs.pop('grow_like_stack', True)
        if state is None:
            state = self.state_blank(addr=addr, **kwargs)
        else:
            state = state.copy()
            state.regs.ip = addr
        cc.setup_callsite(state, ret_addr, args, stack_base, alloc_base, grow_like_stack)
        if state.arch.name == 'PPC64' and toc is not None:
            # PPC64 ABIv1 keeps the TOC pointer in r2.
            state.regs.r2 = toc
        return state
    def prepare_call_state(self, calling_state, initial_state=None,
                           preserve_registers=(), preserve_memory=()):
        """
        This function prepares a state that is executing a call instruction.
        If given an initial_state, it copies over all of the critical registers to it from the
        calling_state. Otherwise, it prepares the calling_state for action.
        This is mostly used to create minimalistic for CFG generation. Some ABIs, such as MIPS PIE and
        x86 PIE, require certain information to be maintained in certain registers. For example, for
        PIE MIPS, this function transfer t9, gp, and ra to the new state.
        """
        if isinstance(self.arch, ArchMIPS32):
            # NOTE(review): this *discards* a caller-provided initial_state and
            # replaces it with a blank one ("is not None"); the docstring
            # suggests the opposite condition may have been intended - verify.
            if initial_state is not None:
                initial_state = self.state_blank()
            mips_caller_saves = ('s0', 's1', 's2', 's3', 's4', 's5', 's6', 's7', 'gp', 'sp', 'bp', 'ra')
            preserve_registers = preserve_registers + mips_caller_saves + ('t9',)
        if initial_state is None:
            new_state = calling_state.copy()
        else:
            new_state = initial_state.copy()
            for reg in set(preserve_registers):
                new_state.registers.store(reg, calling_state.registers.load(reg))
            for addr, val in set(preserve_memory):
                new_state.memory.store(addr, calling_state.memory.load(addr, val))
        return new_state
    def prepare_function_symbol(self, symbol_name):
        """
        Prepare the address space with the data necessary to perform relocations pointing to the given symbol
        """
        return self.proj._extern_obj.get_pseudo_addr(symbol_name)
class SimLinux(SimOS):
    """
    OS-specific configuration for \\*nix-y OSes.

    Sets up the Linux syscall table, loader/TLS hooks, the initial stack
    layout (argv/envp/auxv), and per-arch entry register values.
    """
    # Per-arch mapping of syscall number -> (name, SimProcedure name).
    # Empty sub-dicts mean no syscalls are modeled for that arch yet.
    SYSCALL_TABLE = {
        'AMD64': {
            0: ('read', 'read'),
            1: ('write', 'write'),
            2: ('open', 'open'),
            3: ('close', 'close'),
            4: ('stat', 'stat'),
            5: ('fstat', 'fstat'),
            6: ('stat', 'stat'),  # NOTE(review): kernel syscall 6 is lstat; mapped to the stat simproc here - confirm intended
            9: ('mmap', 'mmap'),
            11: ('munmap', 'munmap'),
            12: ('brk', 'brk'),
            13: ('sigaction', 'sigaction'),
            14: ('sigprocmask', 'sigprocmask'),
            39: ('getpid', 'getpid'),
            60: ('exit', 'exit'),
            158: ('arch_prctl','arch_prctl'),
            186: ('gettid', 'gettid'),
            231: ('exit_group', 'exit'), # really exit_group, but close enough
            234: ('tgkill', 'tgkill'),
        },
        'X86': {
            1: ('exit', 'exit'),
            3: ('read', 'read'),
            4: ('write', 'write'),
            5: ('open', 'open'),
            6: ('close', 'close'),
            45: ('brk', 'brk'),
            252: ('exit_group', 'exit'), # really exit_group, but close enough
        },
        'PPC32': {
        },
        'PPC64': {
        },
        'MIPS32': {
        },
        'MIPS64': {
        },
        'ARM': {
        },
        'ARMEL': {
        },
        'ARMHF': {
        },
        'AARCH64': {
        }
    }
    def __init__(self, *args, **kwargs):
        super(SimLinux, self).__init__(*args, name="Linux", **kwargs)
        # Pseudo-addresses for loader/vsyscall hooks; set in configure_project().
        self._loader_addr = None
        self._loader_lock_addr = None
        self._loader_unlock_addr = None
        self._vsyscall_addr = None
    def configure_project(self):
        """
        Hook loader entry points, dynamic-linker lock functions, TLS helpers,
        and the vsyscall page; patch ifunc GOT entries; load the syscall table.
        """
        super(SimLinux, self).configure_project()
        self._loader_addr = self.proj._extern_obj.get_pseudo_addr('angr##loader')
        self._loader_lock_addr = self.proj._extern_obj.get_pseudo_addr('angr##loader_lock')
        self._loader_unlock_addr = self.proj._extern_obj.get_pseudo_addr('angr##loader_unlock')
        self._vsyscall_addr = self.proj._extern_obj.get_pseudo_addr('angr##vsyscall')
        self.proj.hook(self._loader_addr, LinuxLoader, kwargs={'project': self.proj})
        self.proj.hook(self._loader_lock_addr, _dl_rtld_lock_recursive)
        self.proj.hook(self._loader_unlock_addr, _dl_rtld_unlock_recursive)
        self.proj.hook(self._vsyscall_addr, _vsyscall)
        ld_obj = self.proj.loader.linux_loader_object
        if ld_obj is not None:
            # Replace the dynamic linker's TLS accessors with our simprocedures.
            tlsfunc = ld_obj.get_symbol('__tls_get_addr')
            if tlsfunc is not None:
                self.proj.hook(tlsfunc.rebased_addr, _tls_get_addr, kwargs={'ld': self.proj.loader})
            tlsfunc2 = ld_obj.get_symbol('___tls_get_addr')
            if tlsfunc2 is not None:
                if self.proj.arch.name == 'X86':
                    self.proj.hook(tlsfunc2.rebased_addr, _tls_get_addr_tunder_x86, kwargs={'ld': self.proj.loader})
                else:
                    l.error("Found an unknown ___tls_get_addr, please tell Andrew")
            _rtld_global = ld_obj.get_symbol('_rtld_global')
            if _rtld_global is not None:
                if isinstance(self.proj.arch, ArchAMD64):
                    # Patch the rtld lock/unlock function pointers inside
                    # _rtld_global (offsets specific to this glibc layout).
                    self.proj.loader.memory.write_addr_at(_rtld_global.rebased_addr + 0xF08, self._loader_lock_addr)
                    self.proj.loader.memory.write_addr_at(_rtld_global.rebased_addr + 0xF10, self._loader_unlock_addr)
            _rtld_global_ro = ld_obj.get_symbol('_rtld_global_ro')
            if _rtld_global_ro is not None:
                pass
        tls_obj = self.proj.loader.tls_object
        if tls_obj is not None:
            if isinstance(self.proj.arch, ArchAMD64):
                # Stack canary / pointer guard slots in the TCB; values are
                # the ASCII markers "_CANARY_" / "PTRGUARD".
                self.proj.loader.memory.write_addr_at(tls_obj.thread_pointer + 0x28, 0x5f43414e4152595f)
                self.proj.loader.memory.write_addr_at(tls_obj.thread_pointer + 0x30, 0x5054524755415244)
            elif isinstance(self.proj.arch, ArchX86):
                self.proj.loader.memory.write_addr_at(tls_obj.thread_pointer + 0x10, self._vsyscall_addr)
        elif isinstance(self.proj.arch, ArchARM):
            # ARM kuser helper page: hook the get_tls helper.
            self.proj.hook(0xffff0fe0, _kernel_user_helper_get_tls, kwargs={'ld': self.proj.loader})
        # Only set up ifunc resolution if we are using the ELF backend on AMD64
        if isinstance(self.proj.loader.main_bin, MetaELF):
            if isinstance(self.proj.arch, ArchAMD64):
                for binary in self.proj.loader.all_objects:
                    if not isinstance(binary, MetaELF):
                        continue
                    for reloc in binary.relocs:
                        if reloc.symbol is None or reloc.resolvedby is None:
                            continue
                        if reloc.resolvedby.type != 'STT_GNU_IFUNC':
                            continue
                        gotaddr = reloc.addr + binary.rebase_addr
                        gotvalue = self.proj.loader.memory.read_addr_at(gotaddr)
                        if self.proj.is_hooked(gotvalue):
                            continue
                        # Replace it with a ifunc-resolve simprocedure!
                        kwargs = {
                            'proj': self.proj,
                            'funcaddr': gotvalue,
                            'gotaddr': gotaddr,
                            'funcname': reloc.symbol.name
                        }
                        randaddr = self.proj._extern_obj.get_pseudo_addr('ifunc_' + reloc.symbol.name)
                        self.proj.hook(randaddr, IFuncResolver, kwargs=kwargs)
                        self.proj.loader.memory.write_addr_at(gotaddr, randaddr)
        self._load_syscalls(SimLinux.SYSCALL_TABLE[self.arch.name], "syscalls")
    def state_blank(self, fs=None, concrete_fs=False, chroot=None, **kwargs):
        """
        Build a blank Linux state: set the per-arch thread-pointer register
        and attach the POSIX plugin (optionally backed by a concrete fs).
        """
        state = super(SimLinux, self).state_blank(**kwargs) #pylint:disable=invalid-name
        if self.proj.loader.tls_object is not None:
            # Each arch keeps its thread pointer in a different register.
            if isinstance(state.arch, ArchAMD64):
                state.regs.fs = self.proj.loader.tls_object.user_thread_pointer
            elif isinstance(state.arch, ArchX86):
                state.regs.gs = self.proj.loader.tls_object.user_thread_pointer >> 16
            elif isinstance(state.arch, (ArchMIPS32, ArchMIPS64)):
                state.regs.ulr = self.proj.loader.tls_object.user_thread_pointer
            elif isinstance(state.arch, ArchPPC32):
                state.regs.r2 = self.proj.loader.tls_object.user_thread_pointer
            elif isinstance(state.arch, ArchPPC64):
                state.regs.r13 = self.proj.loader.tls_object.user_thread_pointer
            elif isinstance(state.arch, ArchAArch64):
                state.regs.tpidr_el0 = self.proj.loader.tls_object.user_thread_pointer
        state.register_plugin('posix', SimStateSystem(fs=fs, concrete_fs=concrete_fs, chroot=chroot))
        if self.proj.loader.main_bin.is_ppc64_abiv1:
            state.libc.ppc64_abiv = 'ppc64_1'
        return state
    def state_entry(self, args=None, env=None, argc=None, **kwargs):
        """
        Build an entry state with argv/envp/auxv laid out on the stack and
        argc pushed, mirroring the kernel's process startup layout.
        """
        state = super(SimLinux, self).state_entry(**kwargs)
        # Handle default values
        if args is None:
            args = []
        if env is None:
            env = {}
        # Prepare argc
        if argc is None:
            argc = claripy.BVV(len(args), state.arch.bits)
        elif type(argc) in (int, long):  # pylint: disable=unidiomatic-typecheck
            argc = claripy.BVV(argc, state.arch.bits)
        # Make string table for args/env/auxv
        table = StringTableSpec()
        # Add args to string table
        for arg in args:
            table.add_string(arg)
        table.add_null()
        # Add environment to string table
        for k, v in env.iteritems():
            if type(k) is str:  # pylint: disable=unidiomatic-typecheck
                k = claripy.BVV(k)
            elif type(k) is unicode:  # pylint: disable=unidiomatic-typecheck
                k = claripy.BVV(k.encode('utf-8'))
            elif isinstance(k, claripy.ast.Bits):
                pass
            else:
                raise TypeError("Key in env must be either string or bitvector")
            if type(v) is str:  # pylint: disable=unidiomatic-typecheck
                v = claripy.BVV(v)
            elif type(v) is unicode:  # pylint: disable=unidiomatic-typecheck
                v = claripy.BVV(v.encode('utf-8'))
            elif isinstance(v, claripy.ast.Bits):
                pass
            else:
                raise TypeError("Value in env must be either string or bitvector")
            table.add_string(k.concat(claripy.BVV('='), v))
        table.add_null()
        # Prepare the auxiliary vector and add it to the end of the string table
        # TODO: Actually construct a real auxiliary vector
        # current vector is an AT_RANDOM entry where the "random" value is 0xaec0aec0aec0...
        aux = [(25, ("AEC0"*8).decode('hex'))]
        for a, b in aux:
            table.add_pointer(a)
            if isinstance(b, str):
                table.add_string(b)
            else:
                table.add_pointer(b)
        table.add_null()
        table.add_null()
        # Dump the table onto the stack, calculate pointers to args, env, and auxv
        state.memory.store(state.regs.sp - 16, claripy.BVV(0, 8*16))
        argv = table.dump(state, state.regs.sp - 16)
        envp = argv + ((len(args) + 1) * state.arch.bytes)
        auxv = argv + ((len(args) + len(env) + 2) * state.arch.bytes)
        # Put argc on stack and fix the stack pointer
        newsp = argv - state.arch.bytes
        state.memory.store(newsp, argc, endness=state.arch.memory_endness)
        state.regs.sp = newsp
        if state.arch.name in ('PPC32',):
            # PPC32 ABI expects extra zeroed slots below the argument block.
            state.stack_push(claripy.BVV(0, 32))
            state.stack_push(claripy.BVV(0, 32))
            state.stack_push(claripy.BVV(0, 32))
            state.stack_push(claripy.BVV(0, 32))
        # store argc argv envp auxv in the posix plugin
        state.posix.argv = argv
        state.posix.argc = argc
        state.posix.environ = envp
        state.posix.auxv = auxv
        self.set_entry_register_values(state)
        return state
    def set_entry_register_values(self, state):
        """
        Apply the arch's entry_register_values table to `state`, resolving
        symbolic indicators like 'argc' / 'argv' from the posix plugin.
        """
        for reg, val in state.arch.entry_register_values.iteritems():
            if isinstance(val, (int, long)):
                state.registers.store(reg, val, size=state.arch.bytes)
            elif isinstance(val, (str,)):
                if val == 'argc':
                    state.registers.store(reg, state.posix.argc, size=state.arch.bytes)
                elif val == 'argv':
                    state.registers.store(reg, state.posix.argv)
                elif val == 'envp':
                    state.registers.store(reg, state.posix.environ)
                elif val == 'auxv':
                    state.registers.store(reg, state.posix.auxv)
                elif val == 'ld_destructor':
                    # a pointer to the dynamic linker's destructor routine, to be called at exit
                    # or NULL. We like NULL. It makes things easier.
                    state.registers.store(reg, 0)
                elif val == 'toc':
                    if self.proj.loader.main_bin.is_ppc64_abiv1:
                        state.registers.store(reg, self.proj.loader.main_bin.ppc64_initial_rtoc)
                elif val == 'thread_pointer':
                    state.registers.store(reg, self.proj.loader.tls_object.user_thread_pointer)
                else:
                    l.warning('Unknown entry point register value indicator "%s"', val)
            else:
                l.error('What the ass kind of default value is %s?', val)
    def state_full_init(self, **kwargs):
        """Start execution at the loader hook so initializers run first."""
        kwargs['addr'] = self.proj._extern_obj.get_pseudo_addr('angr##loader')
        return super(SimLinux, self).state_full_init(**kwargs)
    def prepare_function_symbol(self, symbol_name):
        """
        Prepare the address space with the data necessary to perform relocations pointing to the given symbol.

        On PPC64 (ABIv1) a function symbol is a descriptor, so both a hook
        address and a TOC entry pointing at it are allocated.
        """
        if self.arch.name == 'PPC64':
            pseudo_hookaddr = self.proj._extern_obj.get_pseudo_addr(symbol_name + '#func')
            # NOTE(review): both calls use the same name (symbol_name + '#func');
            # if get_pseudo_addr deduplicates by name, pseudo_toc aliases
            # pseudo_hookaddr - a distinct '#toc' name may have been intended.
            pseudo_toc = self.proj._extern_obj.get_pseudo_addr(symbol_name + '#func', size=0x18)
            self.proj._extern_obj.memory.write_addr_at(pseudo_toc - self.proj._extern_obj.rebase_addr, pseudo_hookaddr)
            return pseudo_hookaddr
        else:
            return self.proj._extern_obj.get_pseudo_addr(symbol_name)
class SimCGC(SimOS):
    """
    OS-specific configuration for DARPA CGC (Cyber Grand Challenge) binaries:
    the seven CGC syscalls, the fixed stack base, and the magic flag page.
    """
    # CGC syscall number -> (name, SimProcedure name) in the "cgc" library.
    SYSCALL_TABLE = {
        1: ('_terminate', '_terminate'),
        2: ('transmit', 'transmit'),
        3: ('receive', 'receive'),
        4: ('fdwait', 'fdwait'),
        5: ('allocate', 'allocate'),
        6: ('deallocate', 'deallocate'),
        7: ('random', 'random'),
    }
    def __init__(self, *args, **kwargs):
        super(SimCGC, self).__init__(*args, name="CGC", **kwargs)
    def configure_project(self):
        """Install the CGC syscall table on top of the base configuration."""
        super(SimCGC, self).configure_project()
        self._load_syscalls(SimCGC.SYSCALL_TABLE, "cgc")
    def state_blank(self, fs=None, **kwargs):
        """
        Build a blank CGC state: fixed stack pointer, mapped flag page,
        POSIX plugin, CGC plugin, and the concrete-transmit address.
        """
        s = super(SimCGC, self).state_blank(**kwargs)  # pylint:disable=invalid-name
        # Special stack base for CGC binaries to work with Shellphish CRS
        s.regs.sp = 0xbaaaaffc
        # Map the special cgc memory
        if o.ABSTRACT_MEMORY not in s.options:
            s.memory.mem._preapproved_stack = IRange(0xbaaab000 - 1024*1024*8, 0xbaaab000)
            s.memory.map_region(0x4347c000, 4096, 1)
        # 'main' gets called with the magic page address as the first fast arg
        s.regs.ecx = 0x4347c000
        s.register_plugin('posix', SimStateSystem(fs=fs))
        # Create the CGC plugin
        s.get_plugin('cgc')
        # set up the address for concrete transmits
        s.unicorn.transmit_addr = self.syscall_table[2].pseudo_addr
        return s
    def state_entry(self, **kwargs):
        """
        Build a CGC entry state. For BackedCGC binaries, restore register
        values, allocation base, and replayed writes from the backer;
        otherwise initialize all registers to the documented CGC ABI values.
        """
        if isinstance(self.proj.loader.main_bin, BackedCGC):
            kwargs['permissions_backer'] = (True, self.proj.loader.main_bin.permissions_map)
        kwargs['add_options'] = {o.CGC_ZERO_FILL_UNCONSTRAINED_MEMORY} | kwargs.get('add_options', set())
        state = super(SimCGC, self).state_entry(**kwargs)
        if isinstance(self.proj.loader.main_bin, BackedCGC):
            for reg, val in self.proj.loader.main_bin.initial_register_values():
                if reg in state.arch.registers:
                    setattr(state.regs, reg, val)
                elif reg == 'eflags':
                    pass
                elif reg == 'fctrl':
                    # FPU control word: extract the rounding-mode bits.
                    state.regs.fpround = (val & 0xC00) >> 10
                elif reg == 'fstat':
                    # FPU status word: keep the condition-code bits.
                    state.regs.fc3210 = (val & 0x4700)
                elif reg == 'ftag':
                    # Translate the 2-bit-per-register x87 tag word into
                    # simuvex's one-byte-per-register empty/valid flags.
                    empty_bools = [((val >> (x*2)) & 3) == 3 for x in xrange(8)]
                    tag_chars = [claripy.BVV(0 if x else 1, 8) for x in empty_bools]
                    for i, tag in enumerate(tag_chars):
                        setattr(state.regs, 'fpu_t%d' % i, tag)
                elif reg in ('fiseg', 'fioff', 'foseg', 'fooff', 'fop'):
                    pass
                elif reg == 'mxcsr':
                    # SSE control register: extract the rounding-mode bits.
                    state.regs.sseround = (val & 0x600) >> 9
                else:
                    l.error("What is this register %s I have to translate?", reg)
            # Update allocation base
            state.cgc.allocation_base = self.proj.loader.main_bin.current_allocation_base
            # Do all the writes
            writes_backer = self.proj.loader.main_bin.writes_backer
            stdout = 1
            for size in writes_backer:
                if size == 0:
                    continue
                str_to_write = state.posix.files[1].content.load(state.posix.files[1].pos, size)
                a = SimActionData(state, 'file_1_0', 'write', addr=claripy.BVV(state.posix.files[1].pos, state.arch.bits), data=str_to_write, size=size)
                state.posix.write(stdout, str_to_write, size)
                state.log.add_action(a)
        else:
            # Set CGC-specific variables
            state.regs.eax = 0
            state.regs.ebx = 0
            state.regs.ecx = 0x4347c000
            state.regs.edx = 0
            state.regs.edi = 0
            state.regs.esi = 0
            state.regs.esp = 0xbaaaaffc
            state.regs.ebp = 0
            state.regs.cc_dep1 = 0x202  # default eflags
            state.regs.cc_op = 0  # OP_COPY
            state.regs.cc_dep2 = 0  # doesn't matter
            state.regs.cc_ndep = 0  # doesn't matter
            # fpu values
            state.regs.mm0 = 0
            state.regs.mm1 = 0
            state.regs.mm2 = 0
            state.regs.mm3 = 0
            state.regs.mm4 = 0
            state.regs.mm5 = 0
            state.regs.mm6 = 0
            state.regs.mm7 = 0
            state.regs.fpu_tags = 0
            state.regs.fpround = 0
            state.regs.fc3210 = 0x0300
            state.regs.ftop = 0
            # sse values
            state.regs.sseround = 0
            state.regs.xmm0 = 0
            state.regs.xmm1 = 0
            state.regs.xmm2 = 0
            state.regs.xmm3 = 0
            state.regs.xmm4 = 0
            state.regs.xmm5 = 0
            state.regs.xmm6 = 0
            state.regs.xmm7 = 0
            # segmentation registers
            state.regs.ds = 0
            state.regs.es = 0
            state.regs.fs = 0
            state.regs.gs = 0
            state.regs.ss = 0
            state.regs.cs = 0
        return state
#
# Loader-related simprocedures
#
class IFuncResolver(SimProcedure):
    """
    Lazily resolves an STT_GNU_IFUNC symbol: concretely executes the resolver
    function and patches the GOT slot with the returned target, then jumps
    there directly.
    """
    NO_RET = True
    # pylint: disable=arguments-differ,unused-argument
    def run(self, proj=None, funcaddr=None, gotaddr=None, funcname=None):
        # Execute the resolver concretely to discover the real target address.
        resolver = proj.factory.callable(funcaddr, concrete_only=True)
        try:
            target = resolver()
        except AngrCallableError:
            l.critical("Ifunc \"%s\" failed to resolve!", funcname)
            #import IPython; IPython.embed()
            raise
        # Patch the GOT so future calls bypass this resolver entirely.
        self.state.memory.store(gotaddr, target, endness=self.state.arch.memory_endness)
        self.add_successor(self.state, target, claripy.true, 'Ijk_Boring')
    def __repr__(self):
        return '<IFuncResolver %s>' % self.kwargs.get('funcname', None)
class LinuxLoader(SimProcedure):
    """
    Emulates the dynamic loader: runs every initializer reported by the
    loader (each called with argc/argv/envp), then transfers control to the
    program entry point with the arch's entry register values set.
    """
    NO_RET = True
    # pylint: disable=unused-argument,arguments-differ,attribute-defined-outside-init
    local_vars = ('initializers',)
    def run(self, project=None):
        self.initializers = project.loader.get_initializers()
        self.run_initializer(project)
    def run_initializer(self, project=None):
        if self.initializers:
            # Pop and call the next initializer, then re-enter this procedure.
            next_init = self.initializers.pop(0)
            self.call(next_init, (self.state.posix.argc, self.state.posix.argv, self.state.posix.environ), 'run_initializer')
        else:
            # All initializers done: set entry registers and jump to the entry point.
            project._simos.set_entry_register_values(self.state)
            self.jump(project.entry)
class _tls_get_addr(SimProcedure):
    """
    Implements __tls_get_addr: reads a (module id, offset) descriptor from
    memory at `ptr` and returns the address of the requested TLS variable.
    """
    # pylint: disable=arguments-differ
    def run(self, ptr, ld=None):
        word = self.state.arch.bytes
        endness = self.state.arch.memory_endness
        # The descriptor is two consecutive machine words: module id, then offset.
        module_id = self.state.se.any_int(self.state.memory.load(ptr, word, endness=endness))
        offset = self.state.se.any_int(self.state.memory.load(ptr + word, word, endness=endness))
        return claripy.BVV(ld.tls_object.get_addr(module_id, offset), self.state.arch.bits)
class _tls_get_addr_tunder_x86(SimProcedure):
    """
    x86 ___tls_get_addr variant: the descriptor pointer arrives in eax
    rather than as a stack argument; delegates to the generic implementation.
    """
    # pylint: disable=arguments-differ
    def run(self, ld=None):
        descriptor_ptr = self.state.regs.eax
        return self.inline_call(_tls_get_addr, descriptor_ptr, ld=ld).ret_expr
class _dl_rtld_lock_recursive(SimProcedure):
    """
    No-op stub for the dynamic linker's recursive lock acquisition; the
    single-threaded simulation does not need real locking.
    """
    # pylint: disable=arguments-differ, unused-argument
    def run(self, lock):
        # For future reference:
        # ++((pthread_mutex_t *)(lock))->__data.__count;
        return
class _dl_rtld_unlock_recursive(SimProcedure):
    """
    No-op stub for the dynamic linker's recursive lock release.
    """
    def run(self):
        return
class _vsyscall(SimProcedure):
    """
    Stub hooked at the vsyscall entry: immediately executes the arch's return
    instruction and forwards the resulting successor with an 'Ijk_Sys'
    jumpkind, effectively making the vsyscall a no-op return.
    """
    NO_RET = True
    # This is pretty much entirely copied from SimProcedure.ret
    def run(self):
        if self.cleanup:
            self.state.options.discard(o.AST_DEPS)
            self.state.options.discard(o.AUTO_REFS)
        # Lift and run the arch's canonical return instruction in place.
        ret_irsb = pyvex.IRSB(self.state.arch.ret_instruction, self.addr, self.state.arch)
        ret_simirsb = SimIRSB(self.state, ret_irsb, inline=True, addr=self.addr)
        if not ret_simirsb.flat_successors + ret_simirsb.unsat_successors:
            ret_state = ret_simirsb.default_exit
        else:
            ret_state = (ret_simirsb.flat_successors + ret_simirsb.unsat_successors)[0]
        if self.cleanup:
            self.state.options.add(o.AST_DEPS)
            self.state.options.add(o.AUTO_REFS)
        self.add_successor(ret_state, ret_state.scratch.target, ret_state.scratch.guard, 'Ijk_Sys')
class _kernel_user_helper_get_tls(SimProcedure):
    """
    ARM kernel user helper hooked at 0xffff0fe0: places the thread pointer
    in r0, as the kuser get_tls helper does.
    """
    # pylint: disable=arguments-differ
    def run(self, ld=None):
        self.state.regs.r0 = ld.tls_object.user_thread_pointer
        return
class CallReturn(SimProcedure):
    """
    Dead-end hook used as the return address of factory.call_state paths;
    logs the return and stops execution (NO_RET).
    """
    NO_RET = True
    def run(self):
        l.info("A factory.call_state-created path returned!")
        return
# Maps the loader-reported OS name to the SimOS subclass used to configure a
# project. Windows and unknown OSes fall back to the generic SimOS.
os_mapping = {
    'unix': SimLinux,
    'unknown': SimOS,
    'windows': SimOS,
    'cgc': SimCGC,
}
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,416
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_block_cache.py
|
import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
    """Check that translation_cache controls whether block lifts are memoized."""
    # With caching enabled, lifting the same address twice must yield the
    # very same block object.
    proj = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
    cached_block = proj.factory.block(proj.entry)
    assert proj.factory.block(proj.entry) is cached_block
    # With caching disabled, every lift must produce a fresh block object.
    proj = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
    fresh_block = proj.factory.block(proj.entry)
    assert proj.factory.block(proj.entry) is not fresh_block
if __name__ == "__main__":
test_block_cache()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,417
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/girlscout.py
|
import logging
import string
import math
import re
import os
import pickle
from datetime import datetime
from collections import defaultdict
import networkx
import progressbar
import simuvex
import cle
import pyvex
from ..errors import AngrError
from ..analysis import Analysis, register_analysis
from ..surveyors import Explorer, Slicecutor
from ..annocfg import AnnotatedCFG
l = logging.getLogger("angr.analyses.girlscout")
class GirlScout(Analysis):
    """
    We find functions inside the given binary, try to decide the base address if needed, and build a control-flow
    graph on top of that to see if there is an entry or not. Obviously if the binary is not loaded as a blob (not
    using Blob as its backend), GirlScout will not try to determine the base address.
    It's also optional to perform a full code scan of the binary to show where all codes are. By default we don't scan
    the entire binary since it's time consuming.
    You probably need a BoyScout to determine the possible architecture and endianess of your binary blob.
    """
    def __init__(self, binary=None, start=None, end=None, pickle_intermediate_results=False, perform_full_code_scan=False):
        """
        Scan a binary for functions and build a call map / CFG.

        :param binary: Backing object to scan; defaults to the project's main binary.
        :param start: First (rebased) address to scan; defaults to the binary's minimum address.
        :param end: Last (rebased) address to scan; defaults to the binary's maximum address.
        :param pickle_intermediate_results: If True, dump/reload intermediate scan results to disk.
        :param perform_full_code_scan: If True, also run a full linear code scan of the binary.
        """
        self._binary = binary if binary is not None else self.project.loader.main_bin
        self._start = start if start is not None else (self._binary.rebase_addr + self._binary.get_min_addr())
        self._end = end if end is not None else (self._binary.rebase_addr + self._binary.get_max_addr())
        self._pickle_intermediate_results = pickle_intermediate_results
        self._perform_full_code_scan = perform_full_code_scan
        l.debug("Starts at 0x%08x and ends at 0x%08x.", self._start, self._end)
        # Valid memory regions: sorted list of (start, end) tuples covering the
        # loader's concrete backers, used to keep the scanner inside mapped memory.
        self._valid_memory_regions = sorted(
            [ (self._binary.rebase_addr+start, self._binary.rebase_addr+start+len(cbacker))
              for start, cbacker in self.project.loader.memory.cbackers ],
            key=lambda x: x[0]
        )
        self._valid_memory_region_size = sum([ (end - start) for start, end in self._valid_memory_regions ])
        # Size of each basic block, keyed by block address
        self._block_size = { }
        self._next_addr = self._start - 1
        # Starting point of functions
        self.functions = None
        # Calls between functions
        # NOTE(review): the class also declares a setter-less `call_map` property
        # below; with that property in place this assignment raises
        # AttributeError - one of the two must go. Verify against upstream.
        self.call_map = networkx.DiGraph()
        # A CFG - this is not what you get from project.analyses.CFG() !
        self.cfg = networkx.DiGraph()
        # Create the segment list
        # NOTE(review): SegmentList is not imported anywhere in this module -
        # presumably injected/imported elsewhere; verify before running.
        self._seg_list = SegmentList()
        self._read_addr_to_run = defaultdict(list)
        self._write_addr_to_run = defaultdict(list)
        # All IRSBs with an indirect exit target
        self._indirect_jumps = set()
        self._unassured_functions = set()
        self.base_address = None
        # Start working!
        self._reconnoiter()
@property
def call_map(self):
return self.call_map
def _get_next_addr_to_search(self, alignment=None):
# TODO: Take care of those functions that are already generated
curr_addr = self._next_addr
if self._seg_list.has_blocks:
curr_addr = self._seg_list.next_free_pos(curr_addr)
if alignment is not None:
if curr_addr % alignment > 0:
curr_addr = curr_addr - curr_addr % alignment + alignment
# Make sure curr_addr exists in binary
accepted = False
for start, end in self._valid_memory_regions:
if curr_addr >= start and curr_addr < end:
# accept
accepted = True
break
if curr_addr < start:
# accept, but we are skipping the gap
accepted = True
curr_addr = start
if not accepted:
# No memory available!
return None
self._next_addr = curr_addr
if self._end is None or curr_addr < self._end:
l.debug("Returning new recon address: 0x%08x", curr_addr)
return curr_addr
else:
l.debug("0x%08x is beyond the ending point.", curr_addr)
return None
    def _get_next_code_addr(self, initial_state):
        """
        Besides calling _get_next_addr, we will check if data locates at that address seems to be code or not. If not,
        we'll move on to request for next valid address.

        :param initial_state: State whose memory is probed for printable-string
                              data (via mem_concrete); strings are marked as
                              occupied and skipped.
        :returns: The next instruction-aligned candidate code address, or None
                  when the search space is exhausted.
        """
        next_addr = self._get_next_addr_to_search()
        if next_addr is None:
            return None
        start_addr = next_addr
        sz = ""
        is_sz = True
        while is_sz:
            # Get data until we meet a 0
            while next_addr in initial_state.memory:
                try:
                    l.debug("Searching address %x", next_addr)
                    val = initial_state.mem_concrete(next_addr, 1)
                    if val == 0:
                        # NUL terminator: a run of >= 4 printable chars counts
                        # as a string; shorter runs are treated as code.
                        if len(sz) < 4:
                            is_sz = False
                        else:
                            # NOTE(review): reach_end is a dead store - it is
                            # never read anywhere in this method.
                            reach_end = True
                        break
                    if chr(val) not in string.printable:
                        is_sz = False
                        break
                    sz += chr(val)
                    next_addr += 1
                except simuvex.SimValueError:
                    # Not concretizable
                    l.debug("Address 0x%08x is not concretizable!", next_addr)
                    break
            if len(sz) > 0 and is_sz:
                # Found a string: mark it (plus terminator) occupied and
                # restart the probe at the next free address.
                l.debug("Got a string of %d chars: [%s]", len(sz), sz)
                # l.debug("Occpuy %x - %x", start_addr, start_addr + len(sz) + 1)
                self._seg_list.occupy(start_addr, len(sz) + 1)
                sz = ""
                next_addr = self._get_next_addr_to_search()
                if next_addr is None:
                    return None
                # l.debug("next addr = %x", next_addr)
                start_addr = next_addr
            if is_sz:
                # Step past the byte we stopped on before re-probing.
                next_addr += 1
        instr_alignment = initial_state.arch.instruction_alignment
        if start_addr % instr_alignment > 0:
            # Round the candidate up to the architecture's instruction alignment.
            start_addr = start_addr - start_addr % instr_alignment + \
                         instr_alignment
        l.debug('_get_next_code_addr() returns 0x%x', start_addr)
        return start_addr
    def _symbolic_reconnoiter(self, addr, target_addr, max_depth=10):
        """
        When an IRSB has more than two exits (for example, a jumptable), we
        cannot concretize their exits in concrete mode. Hence we statically
        execute the function from beginning in this method, and then switch to
        symbolic mode for the final IRSB to get all possible exits of that
        IRSB.

        :param addr: Address to start the symbolic exploration from.
        :param target_addr: Address the explorer should try to reach.
        :param max_depth: Maximum exploration depth before giving up.
        :returns: Flat exits of the final run of the found path, or [] when the
                  target was not reached.
        """
        state = self.project.factory.blank_state(addr=addr,
                                                 mode="symbolic",
                                                 add_options={simuvex.o.CALLLESS}
                                                 )
        initial_exit = self.project.factory.path(state)
        # NOTE(review): find=(target_addr) is just the bare int, not a 1-tuple;
        # Explorer appears to accept either, but (target_addr,) would be the
        # unambiguous spelling - verify against the Explorer API.
        explorer = Explorer(self.project,
                            start=initial_exit,
                            max_depth=max_depth,
                            find=(target_addr), num_find=1).run()
        if len(explorer.found) > 0:
            path = explorer.found[0]
            last_run = path.last_run
            return last_run.flat_exits()
        else:
            return []
def _static_memory_slice(self, run):
if isinstance(run, simuvex.SimIRSB):
for stmt in run.statements:
refs = stmt.actions
if len(refs) > 0:
real_ref = refs[-1]
if type(real_ref) == simuvex.SimActionData:
if real_ref.action == 'write':
addr = real_ref.addr
if not run.initial_state.se.symbolic(addr):
concrete_addr = run.initial_state.se.any_int(addr)
self._write_addr_to_run[addr].append(run.addr)
elif real_ref.action == 'read':
addr = real_ref.addr
if not run.initial_state.se.symbolic(addr):
concrete_addr = run.initial_state.se.any_int(addr)
self._read_addr_to_run[addr].append(run.addr)
def _scan_code(self, traced_addresses, function_exits, initial_state, starting_address):
# Saving tuples like (current_function_addr, next_exit_addr)
# Current_function_addr == -1 for exits not inside any function
remaining_exits = set()
next_addr = starting_address
# Initialize the remaining_exits set
remaining_exits.add((next_addr,
next_addr,
next_addr,
initial_state.copy()))
while len(remaining_exits):
current_function_addr, previous_addr, parent_addr, state = \
remaining_exits.pop()
if previous_addr in traced_addresses:
continue
# Add this node to the CFG first, in case this is a dangling node
self.cfg.add_node(previous_addr)
if current_function_addr != -1:
l.debug("Tracing new exit 0x%08x in function 0x%08x",
previous_addr, current_function_addr)
else:
l.debug("Tracing new exit 0x%08x", previous_addr)
traced_addresses.add(previous_addr)
self._scan_block(previous_addr, state, current_function_addr, function_exits, remaining_exits, traced_addresses)
    def _scan_block(self, addr, state, current_function_addr, function_exits, remaining_exits, traced_addresses):
        """
        Fast-path block scan: lift the block at `addr` with pyvex (no symbolic
        execution) and queue its statically-known successors.

        :param addr: Address of the block to lift.
        :param state: Unused on this fast path (successor states are queued as None).
        :param current_function_addr: Enclosing function address, or -1 for none.
        :param function_exits: Mapping function addr -> set of exit targets.
        :param remaining_exits: Worklist set shared with _scan_code.
        :param traced_addresses: Set of already-visited addresses.
        """
        # Let's try to create the pyvex IRSB directly, since it's much faster
        try:
            irsb = self.project.factory.block(addr).vex
            # Log the size of this basic block
            self._block_size[addr] = irsb.size
            # Occupy the block
            self._seg_list.occupy(addr, irsb.size)
        except (AngrTranslationError, AngrMemoryError):
            return
        # Get all possible successors
        next, jumpkind = irsb.next, irsb.jumpkind
        successors = [ (i.dst, i.jumpkind) for i in irsb.statements if type(i) is pyvex.IRStmt.Exit]
        successors.append((next, jumpkind))
        # Process each successor
        for suc in successors:
            target, jumpkind = suc
            if type(target) is pyvex.IRExpr.Const:
                next_addr = target.con.value
            else:
                next_addr = None
            if jumpkind == 'Ijk_Boring' and next_addr is not None:
                remaining_exits.add((current_function_addr, next_addr,
                                     addr, None))
            elif jumpkind == 'Ijk_Call' and next_addr is not None:
                # Log it before we cut the tracing :)
                # NOTE(review): inside this branch jumpkind is always
                # "Ijk_Call", so the inner `if` is always true and the
                # Ijk_Boring/Ijk_Ret arm below is unreachable - it looks
                # copy-pasted from _scan_block_; verify before removing.
                if jumpkind == "Ijk_Call":
                    if current_function_addr != -1:
                        self.functions.add(current_function_addr)
                        self.functions.add(next_addr)
                        self.call_map.add_edge(current_function_addr, next_addr)
                    else:
                        self.functions.add(next_addr)
                        self.call_map.add_node(next_addr)
                elif jumpkind == "Ijk_Boring" or \
                        jumpkind == "Ijk_Ret":
                    if current_function_addr != -1:
                        function_exits[current_function_addr].add(next_addr)
                # If we have traced it before, don't trace it anymore
                # NOTE(review): `return` (not `continue`) abandons the
                # remaining successors of this block - confirm intended.
                if next_addr in traced_addresses:
                    return
                remaining_exits.add((next_addr, next_addr, addr, None))
        l.debug("Function calls: %d", len(self.call_map.nodes()))
    def _scan_block_(self, addr, state, current_function_addr, function_exits, remaining_exits, traced_addresses):
        """
        Slow-path block scan: execute the block at `addr` as a SimRun and queue
        all concretizable successors; indirect exits are recorded in
        self._indirect_jumps for later resolution.

        NOTE(review): Python-2-only `except E, ex` syntax below.

        :param addr: Address of the block to execute.
        :param state: Input state; its ip is overwritten with `addr`.
        :param current_function_addr: Enclosing function address, or -1 for none.
        :param function_exits: Mapping function addr -> set of exit targets.
        :param remaining_exits: Worklist set shared with the scanner.
        :param traced_addresses: Set of already-visited addresses.
        """
        # Get a basic block
        state.ip = addr
        s_path = self.project.factory.path(state)
        try:
            s_run = s_path.next_run
        except simuvex.SimIRSBError, ex:
            l.debug(ex)
            return
        except AngrError, ex:
            # "No memory at xxx"
            l.debug(ex)
            return
        except (simuvex.SimValueError, simuvex.SimSolverModeError), ex:
            # Cannot concretize something when executing the SimRun
            l.debug(ex)
            return
        except simuvex.SimError as ex:
            # Catch all simuvex errors
            l.debug(ex)
            return
        if type(s_run) is simuvex.SimIRSB:
            # Calculate its entropy to avoid jumping into uninitialized/all-zero space
            bytes = s_run.irsb._state[1]['bytes']
            size = s_run.irsb.size
            ent = self._calc_entropy(bytes, size=size)
            if ent < 1.0 and size > 40:
                # Skipping basic blocks that have a very low entropy
                return
        # self._static_memory_slice(s_run)
        # Mark that part as occupied
        if isinstance(s_run, simuvex.SimIRSB):
            self._seg_list.occupy(addr, s_run.irsb.size)
        successors = s_run.flat_successors + s_run.unsat_successors
        # A Ret that coexists with a Call exit is really the fall-through
        # ("fake ret") of the call, so it is re-labelled below.
        has_call_exit = False
        tmp_exit_set = set()
        for suc in successors:
            if suc.scratch.jumpkind == "Ijk_Call":
                has_call_exit = True
        for suc in successors:
            jumpkind = suc.scratch.jumpkind
            if has_call_exit and jumpkind == "Ijk_Ret":
                jumpkind = "Ijk_FakeRet"
            if jumpkind == "Ijk_Ret":
                continue
            try:
                # Try to concretize the target. If we can't, just move on
                # to the next target
                next_addr = suc.se.exactly_n_int(suc.ip, 1)[0]
            except (simuvex.SimValueError, simuvex.SimSolverModeError) as ex:
                # Undecidable jumps (might be a function return, or a conditional branch, etc.)
                # We log it
                self._indirect_jumps.add((suc.scratch.jumpkind, addr))
                l.info("IRSB 0x%x has an indirect exit %s.", addr, suc.scratch.jumpkind)
                continue
            self.cfg.add_edge(addr, next_addr, jumpkind=jumpkind)
            # Log it before we cut the tracing :)
            if jumpkind == "Ijk_Call":
                if current_function_addr != -1:
                    self.call_map.add_edge(current_function_addr, next_addr)
                else:
                    self.call_map.add_node(next_addr)
            elif jumpkind == "Ijk_Boring" or \
                    jumpkind == "Ijk_Ret":
                if current_function_addr != -1:
                    function_exits[current_function_addr].add(next_addr)
            # If we have traced it before, don't trace it anymore
            if next_addr in traced_addresses:
                continue
            # If we have traced it in current loop, don't tract it either
            if next_addr in tmp_exit_set:
                continue
            tmp_exit_set.add(next_addr)
            if jumpkind == "Ijk_Call":
                # This is a call. Let's record it
                new_state = suc.copy()
                # Unconstrain those parameters
                # TODO: Support other archs as well
                # if 12 + 16 in new_state.registers.mem:
                #    del new_state.registers.mem[12 + 16]
                #if 16 + 16 in new_state.registers.mem:
                #    del new_state.registers.mem[16 + 16]
                #if 20 + 16 in new_state.registers.mem:
                #    del new_state.registers.mem[20 + 16]
                # 0x8000000: call 0x8000045
                remaining_exits.add((next_addr, next_addr, addr, new_state))
                l.debug("Function calls: %d", len(self.call_map.nodes()))
            elif jumpkind == "Ijk_Boring" or \
                    jumpkind == "Ijk_Ret" or \
                    jumpkind == "Ijk_FakeRet":
                new_state = suc.copy()
                l.debug("New exit with jumpkind %s", jumpkind)
                # FIXME: should not use current_function_addr if jumpkind is "Ijk_Ret"
                remaining_exits.add((current_function_addr, next_addr,
                                     addr, new_state))
            elif jumpkind == "Ijk_NoDecode":
                # That's something VEX cannot decode!
                # We assume we ran into a deadend
                pass
            elif jumpkind.startswith("Ijk_Sig"):
                # Should not go into that exit
                pass
            elif jumpkind == "Ijk_TInval":
                # ppc32: isync
                # FIXME: It is the same as Ijk_Boring! Process it later
                pass
            elif jumpkind == 'Ijk_Sys_syscall':
                # Let's not jump into syscalls
                pass
            elif jumpkind == 'Ijk_InvalICache':
                pass
            elif jumpkind == 'Ijk_MapFail':
                pass
            elif jumpkind == 'Ijk_EmWarn':
                pass
            else:
                raise Exception("NotImplemented")
def _scan_function_prologues(self, traced_address, function_exits, initial_state):
"""
Scan the entire program space for prologues, and start code scanning at those positions
:param traced_address:
:param function_exits:
:param initial_state:
:param next_addr:
:returns:
"""
# Precompile all regexes
regexes = set()
for ins_regex in self.project.arch.function_prologs:
r = re.compile(ins_regex)
regexes.add(r)
# TODO: Make sure self._start is aligned
# Construct the binary blob first
# TODO: We shouldn't directly access the _memory of main_bin. An interface
# to that would be awesome.
strides = self.project.loader.main_bin.memory.stride_repr
for start_, end_, bytes in strides:
for regex in regexes:
# Match them!
for mo in regex.finditer(bytes):
position = mo.start() + start_
if position % self.project.arch.instruction_alignment == 0:
if position not in traced_address:
percentage = self._seg_list.occupied_size * 100.0 / (self._valid_memory_region_size)
l.info("Scanning %xh, progress %0.04f%%", position, percentage)
self._unassured_functions.add(position)
self._scan_code(traced_address, function_exits, initial_state, position)
else:
l.info("Skipping %xh", position)
    def _process_indirect_jumps(self):
        """
        Execute each basic block with an indeterminiable exit target

        Tries, for each recorded indirect call site, to (a) resolve the target
        by one concrete execution, and failing that (b) backward-slice from the
        jump with Blade and re-execute the slice with Slicecutor.  Gives up
        once more than 20 targets are collected.

        NOTE(review): Python-2 print statements throughout.

        :returns: Set of resolved function start addresses.
        """
        function_starts = set()
        print "We have %d indirect jumps" % len(self._indirect_jumps)
        for jumpkind, irsb_addr in self._indirect_jumps:
            # First execute the current IRSB in concrete mode
            if len(function_starts) > 20:
                break
            if jumpkind == "Ijk_Call":
                state = self.project.factory.blank_state(addr=irsb_addr, mode="concrete",
                                                         add_options={simuvex.o.SYMBOLIC_INITIAL_VALUES}
                                                         )
                path = self.project.factory.path(state)
                print hex(irsb_addr)
                try:
                    r = (path.next_run.successors + path.next_run.unsat_successors)[0]
                    ip = r.se.exactly_n_int(r.ip, 1)[0]
                    function_starts.add(ip)
                    continue
                except simuvex.SimSolverModeError as ex:
                    pass
                # Not resolved
                # Do a backward slicing from the call
                irsb = self.project.factory.block(irsb_addr).vex
                stmts = irsb.statements
                # Start slicing from the "next"
                # NOTE(review): Blade is imported at the bottom of this module.
                b = Blade(self.cfg, irsb.addr, -1, project=self.project)
                # Debugging output
                for addr, stmt_idx in sorted(list(b.slice.nodes())):
                    irsb = self.project.factory.block(addr).vex
                    stmts = irsb.statements
                    print "%x: %d | " % (addr, stmt_idx),
                    print "%s" % stmts[stmt_idx],
                    print "%d" % b.slice.in_degree((addr, stmt_idx))
                print ""
                # Get all sources
                sources = [n for n in b.slice.nodes() if b.slice.in_degree(n) == 0]
                # Create the annotated CFG
                annotatedcfg = AnnotatedCFG(self.project, None, target_irsb_addr=irsb_addr, detect_loops=False)
                annotatedcfg.from_digraph(b.slice)
                for src_irsb, src_stmt_idx in sources:
                    # Use slicecutor to execute each one, and get the address
                    # We simply give up if any exception occurs on the way
                    start_state = self.project.factory.blank_state(addr=src_irsb,
                                                                   add_options=
                                                                   {simuvex.o.DO_RET_EMULATION,
                                                                    simuvex.o.TRUE_RET_EMULATION_GUARD}
                                                                   )
                    start_path = self.project.factory.path(start_state)
                    # Create the slicecutor
                    slicecutor = Slicecutor(self.project, annotatedcfg, start=start_path, targets=(irsb_addr,))
                    # Run it!
                    try:
                        slicecutor.run()
                    except KeyError as ex:
                        # This is because the program slice is incomplete.
                        # Blade will support more IRExprs and IRStmts
                        l.debug("KeyError occurred due to incomplete program slice.", exc_info=ex)
                        continue
                    # Get the jumping targets
                    for r in slicecutor.reached_targets:
                        if r.next_run.successors:
                            target_ip = r.next_run.successors[0].ip
                            se = r.next_run.successors[0].se
                            if not se.symbolic(target_ip):
                                concrete_ip = se.exactly_n_int(target_ip, 1)[0]
                                function_starts.add(concrete_ip)
                                l.info("Found a function address %x", concrete_ip)
        return function_starts
def _solve_forbase_address(self, function_starts, functions):
"""
Voting for the most possible base address.
:param function_starts:
:param functions:
:returns:
"""
pseudo_base_addr = self.project.loader.main_bin.get_min_addr()
base_addr_ctr = { }
for s in function_starts:
for f in functions:
base_addr = s - f + pseudo_base_addr
ctr = 1
for k in function_starts:
if k - base_addr + pseudo_base_addr in functions:
ctr += 1
if ctr > 5:
base_addr_ctr[base_addr] = ctr
if len(base_addr_ctr):
base_addr, hits = sorted([(k, v) for k, v in base_addr_ctr.iteritems()], key=lambda x: x[1], reverse=True)[0]
return base_addr
else:
return None
def _reconnoiter(self):
if type(self._binary) is cle.blob.Blob:
self._determinebase_address()
if self._perform_full_code_scan:
self._full_code_scan()
    def _determinebase_address(self):
        """
        The basic idea is simple: start from a specific point, try to construct
        functions as much as we can, and maintain a function distribution graph
        and a call graph simultaneously. Repeat searching until we come to the
        end that there is no new function to be found.
        A function should start with:
        # some addresses that a call exit leads to, or
        # certain instructions. They are recoreded in SimArch.
        For a better performance, instead of blindly scanning the entire process
        space, we first try to search for instruction patterns that a function
        may start with, and start scanning at those positions. Then we try to
        decode anything that is left.
        """
        traced_address = set()
        self.functions = set()
        self.call_map = networkx.DiGraph()
        self.cfg = networkx.DiGraph()
        initial_state = self.project.factory.blank_state(mode="fastpath")
        # Strip constraint tracking and ref collection for speed.
        initial_options = initial_state.options - { simuvex.o.TRACK_CONSTRAINTS } - simuvex.o.refs
        initial_options |= { simuvex.o.SUPER_FASTPATH }
        # initial_options.remove(simuvex.o.COW_STATES)
        initial_state.options = initial_options
        # Sadly, not all calls to functions are explicitly made by call
        # instruction - they could be a jmp or b, or something else. So we
        # should record all exits from a single function, and then add
        # necessary calling edges in our call map during the post-processing
        # phase.
        function_exits = defaultdict(set)
        dump_file_prefix = self.project.filename
        if self._pickle_intermediate_results and \
                os.path.exists(dump_file_prefix + "_indirect_jumps.angr"):
            l.debug("Loading existing intermediate results.")
            # NOTE(review): file handles from open() here are never closed
            # explicitly - confirm whether that matters for the target runtime.
            self._indirect_jumps = pickle.load(open(dump_file_prefix + "_indirect_jumps.angr", "rb"))
            self.cfg = pickle.load(open(dump_file_prefix + "_coercecfg.angr", "rb"))
            self._unassured_functions = pickle.load(open(dump_file_prefix + "_unassured_functions.angr", "rb"))
        else:
            # Performance boost :-)
            # Scan for existing function prologues
            self._scan_function_prologues(traced_address, function_exits, initial_state)
            if self._pickle_intermediate_results:
                l.debug("Dumping intermediate results.")
                pickle.dump(self._indirect_jumps, open(dump_file_prefix + "_indirect_jumps.angr", "wb"), -1)
                pickle.dump(self.cfg, open(dump_file_prefix + "_coercecfg.angr", "wb"), -1)
                pickle.dump(self._unassured_functions, open(dump_file_prefix + "_unassured_functions.angr", "wb"), -1)
        if len(self._indirect_jumps):
            # We got some indirect jumps!
            # Gotta execute each basic block and see where it wants to jump to
            function_starts = self._process_indirect_jumps()
            self.base_address = self._solve_forbase_address(function_starts, self._unassured_functions)
            l.info("Base address should be 0x%x", self.base_address)
        else:
            l.debug("No indirect jumps are found. We switch to the slowpath mode.")
            # TODO: Slowpath mode...
            while True:
                next_addr = self._get_next_code_addr(initial_state)
                percentage = self._seg_list.occupied_size * 100.0 / (self._valid_memory_region_size)
                # NOTE(review): next_addr is formatted with %x BEFORE the None
                # check - when the scan is exhausted this log call misfires;
                # verify whether the log record is silently dropped.
                l.info("Analyzing %xh, progress %0.04f%%", next_addr, percentage)
                if next_addr is None:
                    break
                self.call_map.add_node(next_addr)
                self._scan_code(traced_address, function_exits, initial_state, next_addr)
        # Post-processing: Map those calls that are not made by call/blr
        # instructions to their targets in our map
        for src, s in function_exits.items():
            if src in self.call_map:
                for target in s:
                    if target in self.call_map:
                        self.call_map.add_edge(src, target)
        # Merge nodes that are within 4 bytes of each other: fold the later
        # node's edges into the earlier one and drop it.
        nodes = sorted(self.call_map.nodes())
        for i in range(len(nodes) - 1):
            if nodes[i] >= nodes[i + 1] - 4:
                for dst in self.call_map.successors(nodes[i + 1]):
                    self.call_map.add_edge(nodes[i], dst)
                for src in self.call_map.predecessors(nodes[i + 1]):
                    self.call_map.add_edge(src, nodes[i])
                self.call_map.remove_node(nodes[i + 1])
        l.debug("Construction finished.")
    def _full_code_scan(self):
        """
        Perform a full code scan on the target binary.

        Linearly walks every candidate code address (skipping recognized
        strings), scanning blocks and growing self.call_map / self.cfg, while
        showing a progressbar (third-party `progressbar` package).
        """
        # We gotta time this function
        start_time = datetime.now()
        traced_address = set()
        self.functions = set()
        self.call_map = networkx.DiGraph()
        self.cfg = networkx.DiGraph()
        initial_state = self.project.factory.blank_state(mode="fastpath")
        # Same speed-oriented option set as _determinebase_address.
        initial_options = initial_state.options - {simuvex.o.TRACK_CONSTRAINTS} - simuvex.o.refs
        initial_options |= {simuvex.o.SUPER_FASTPATH}
        # initial_options.remove(simuvex.o.COW_STATES)
        initial_state.options = initial_options
        # Sadly, not all calls to functions are explicitly made by call
        # instruction - they could be a jmp or b, or something else. So we
        # should record all exits from a single function, and then add
        # necessary calling edges in our call map during the post-processing
        # phase.
        function_exits = defaultdict(set)
        widgets = [progressbar.Percentage(),
                   ' ',
                   progressbar.Bar(marker=progressbar.RotatingMarker()),
                   ' ',
                   progressbar.Timer(),
                   ' ',
                   progressbar.ETA()
                   ]
        # maxval matches the percentage * 10000 scaling used in the loop below.
        pb = progressbar.ProgressBar(widgets=widgets, maxval=10000 * 100).start()
        while True:
            next_addr = self._get_next_code_addr(initial_state)
            percentage = self._seg_list.occupied_size * 100.0 / (self._valid_memory_region_size)
            if percentage > 100.0: percentage = 100.0
            pb.update(percentage * 10000)
            if next_addr is not None:
                l.info("Analyzing %xh, progress %0.04f%%", next_addr, percentage)
            else:
                l.info('No more addr to analyze. Progress %0.04f%%', percentage)
                break
            self.call_map.add_node(next_addr)
            self._scan_code(traced_address, function_exits, initial_state, next_addr)
        pb.finish()
        end_time = datetime.now()
        l.info("A full code scan takes %d seconds.", (end_time - start_time).seconds)
    def _calc_entropy(self, data, size=None):
        """
        Shannon entropy of `data` in bits per byte (0.0 - 8.0).

        Used to skip basic blocks lifted from uninitialized / all-zero space.

        NOTE(review): Python-2 specific (`xrange`, str() of a cffi buffer).

        :param data: Raw bytes, in a form pyvex.ffi.buffer accepts.
        :param size: Number of bytes to consider; defaults to len(data).
        :returns: Entropy as a float; 0 for empty input.
        """
        if not data:
            return 0
        entropy = 0
        if size is None: size = len(data)
        data = str(pyvex.ffi.buffer(data, size))
        for x in xrange(0, 256):
            p_x = float(data.count(chr(x)))/size
            if p_x > 0:
                entropy += - p_x * math.log(p_x, 2)
        return entropy
def _dbg_output(self):
ret = ""
ret += "Functions:\n"
function_list = list(self.functions)
# Sort it
function_list = sorted(function_list)
for f in function_list:
ret += "0x%08x" % f
return ret
def genenare_callmap_sif(self, filepath):
"""
Generate a sif file from the call map
"""
graph = self.call_map
if graph is None:
raise AngrGirlScoutError('Please generate the call graph first.')
f = open(filepath, "wb")
for src, dst in graph.edges():
f.write("0x%x\tDirectEdge\t0x%x\n" % (src, dst))
f.close()
def generate_code_cover(self):
"""
Generate a list of all recovered basic blocks.
"""
lst = [ ]
for irsb_addr in self.cfg.nodes():
if irsb_addr not in self._block_size:
continue
irsb_size = self._block_size[irsb_addr]
lst.append((irsb_addr, irsb_size))
lst = sorted(lst, key=lambda x: x[0])
return lst
# Make this analysis available as project.analyses.GirlScout.
register_analysis(GirlScout, 'GirlScout')
# Imports at the bottom of the module - presumably to dodge circular imports
# between this analysis and the blade/errors modules; verify before moving.
from ..blade import Blade
from ..errors import AngrGirlScoutError, AngrTranslationError, AngrMemoryError
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,418
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/errors.py
|
# Root of the angr exception hierarchy: catch AngrError to handle anything
# raised by angr itself.
class AngrError(Exception):
    pass

class AngrValueError(AngrError, ValueError):
    pass

class AngrMemoryError(AngrError):
    pass

class AngrTranslationError(AngrError):
    pass

class AngrLifterError(AngrError):
    pass

class AngrExitError(AngrError):
    pass

class AngrPathError(AngrError):
    pass

class AngrPathGroupError(AngrError):
    pass

class AngrInvalidArgumentError(AngrError):
    pass

class AngrSurveyorError(AngrError):
    pass

class AngrAnalysisError(AngrError):
    pass

class PathUnreachableError(AngrError):
    pass

class AngrBladeError(AngrError):
    pass

class AngrBladeSimProcError(AngrBladeError):
    pass

class AngrAnnotatedCFGError(AngrError):
    pass

class AngrBackwardSlicingError(AngrError):
    pass

class AngrGirlScoutError(AngrError):
    pass

class AngrCallableError(AngrSurveyorError):
    pass

class AngrCallableMultistateError(AngrCallableError):
    pass

class AngrSyscallError(AngrError):
    pass

class AngrUnsupportedSyscallError(AngrSyscallError):
    pass

class AngrSimOSError(AngrError):
    pass

# Congruency check failure
class AngrIncongruencyError(AngrAnalysisError):
    pass

#
# ForwardAnalysis errors
#

class AngrForwardAnalysisError(AngrError):
    pass

class AngrSkipEntryNotice(AngrForwardAnalysisError):
    pass

class AngrJobMergingFailureNotice(AngrForwardAnalysisError):
    pass

#
# CFG errors
#

class AngrCFGError(AngrError):
    pass

#
# VFG Errors and notices
#

class AngrVFGError(AngrError):
    pass

class AngrVFGRestartAnalysisNotice(AngrVFGError):
    pass

#
# Data graph errors
#

class AngrDataGraphError(AngrAnalysisError):
    # TODO: deprecated
    pass

class AngrDDGError(AngrAnalysisError):
    pass

#
# Exploration techniques
#

class AngrExplorationTechniqueError(AngrError):
    def __str__(self):
        # getattr keeps the Python-2 `.message` output unchanged while falling
        # back to args[0] on Python 3, where BaseException.message is gone.
        return "<OtiegnqwvkError %s>" % getattr(self, "message", self.args[0] if self.args else "")

class AngrExplorerError(AngrExplorationTechniqueError):
    # BUG FIX: this was defined as `__str` (missing trailing underscores), so
    # Python never called it and instances silently used the parent __str__.
    def __str__(self):
        return "<OtiegnqwvkExplorerError %s>" % getattr(self, "message", self.args[0] if self.args else "")
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,419
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_signed_div.py
|
import nose
import angr
import subprocess
import logging
l = logging.getLogger('angr.tests.test_signed_div')
import os
test_location = str(os.path.dirname(os.path.realpath(__file__)))


def run_signed_div():
    """Check that angr's symbolic run of the signed-division binary
    produces the same stdout as a native execution of the same binary."""
    binary = os.path.join(test_location, "../../binaries-private/tests/i386/test_signed_div")
    project = angr.Project(binary)
    paths = project.factory.path_group()
    paths.explore()
    # stdout captured by the simulated POSIX layer (fd 1)
    angr_stdout = paths.deadended[0].state.posix.dumps(1)
    native = subprocess.Popen(binary, stdout=subprocess.PIPE)
    native_stdout, _ = native.communicate()
    nose.tools.assert_equal(angr_stdout, native_stdout)


def test_signed_div():
    # nose-style generator test
    yield run_signed_div


if __name__ == "__main__":
    run_signed_div()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,420
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/project.py
|
#!/usr/bin/env python
# pylint: disable=W0703
import os
import types
import logging
import weakref
import cle
import simuvex
import archinfo
l = logging.getLogger("angr.project")
projects = weakref.WeakValueDictionary()


def fake_project_unpickler(name):
    """Stand-in unpickler: resolve a pickled Project reference by looking up
    the already-open Project registered under ``name``."""
    try:
        return projects[name]
    except KeyError:
        raise AngrError("Project %s has not been opened." % name)
fake_project_unpickler.__safe_for_unpickling__ = True
def deprecated(f):
    """Decorator that loudly complains on every call to a deprecated function.

    The wrapped function behaves exactly like ``f``; the only side effect is
    an ERROR line printed to stdout on each invocation.
    """
    import functools  # local import keeps the module's top-level imports untouched

    @functools.wraps(f)  # preserve __name__/__doc__ so introspection still works
    def deprecated_wrapper(*args, **kwargs):
        # Parenthesized form prints the identical text on Python 2 and 3.
        print("ERROR: FUNCTION %s IS DEPRECATED. PLEASE UPDATE YOUR CODE." % f)
        return f(*args, **kwargs)
    return deprecated_wrapper
class Project(object):
    """
    This is the main class of the angr module. It is meant to contain a set of binaries and the relationships between
    them, and perform analyses on them.
    :ivar analyses: The available analyses.
    :type analyses: angr.analysis.Analyses
    :ivar entry: The program entrypoint.
    :ivar factory: Provides access to important analysis elements such as path groups and symbolic execution results.
    :type factory: AngrObjectFactory
    :ivar filename: The filename of the executable.
    :ivar loader: The program loader.
    :type loader: cle.Loader
    :ivar surveyor: The available surveyors.
    :type surveyor: angr.surveyor.Surveyors
    """
    def __init__(self, thing,
                 default_analysis_mode=None,
                 ignore_functions=None,
                 use_sim_procedures=True,
                 exclude_sim_procedures_func=None,
                 exclude_sim_procedures_list=(),
                 arch=None, simos=None,
                 load_options=None,
                 translation_cache=True,
                 support_selfmodifying_code=False):
        """
        :param thing: The path to the main executable object to analyze, or a CLE Loader object.
        The following parameters are optional.
        :param default_analysis_mode: The mode of analysis to use by default. Defaults to 'symbolic'.
        :param ignore_functions: A list of function names that, when imported from shared libraries, should
                                 never be stepped into in analysis (calls will return an unconstrained value).
        :param use_sim_procedure: Whether to replace resolved dependencies for which simprocedures are
                                 available with said simprocedures.
        :param exclude_sim_procedures_func: A function that, when passed a function name, returns whether or not to wrap
                                 it with a simprocedure.
        :param exclude_sim_procedures_list: A list of functions to *not* wrap with simprocedures.
        :param arch: The target architecture (auto-detected otherwise).
        :param simos: a SimOS class to use for this project.
        :param load_options: a dict of keyword arguments to the CLE loader. See CLE's docs.
        :param translation_cache: If True, cache translated basic blocks rather than re-translating them.
        :param support_selfmodifying_code: Whether we support self-modifying code. When enabled, Project.sim_block()
                                 will try to read code from the current state instead of the original memory
                                 regions.
        :type support_selfmodifying_code: bool
        A sample `load_options` value could be:
        ::
            { 'auto_load_libs': False,
              'skip_libs': 'ld.so.2',
              'lib_opts': {
                'libc.so.6': {
                  'custom_base_addr': 0x55555400
                }
              }
            }
        """
        # Step 1: Load the binary
        if load_options is None: load_options = {}
        if isinstance(thing, cle.Loader):
            # An already-constructed loader was handed in; reuse it directly.
            self.loader = thing
            self.filename = self.loader._main_binary_path
        elif hasattr(thing, 'read') and hasattr(thing, 'seek'):
            # File-like object (duck-typed on read/seek); no filename available.
            l.info("Loading binary from stream")
            self.filename = None
            self.loader = cle.Loader(thing, **load_options)
        elif not isinstance(thing, (unicode, str)) or not os.path.exists(thing) or not os.path.isfile(thing):
            raise Exception("Not a valid binary file: %s" % repr(thing))
        else:
            # use angr's loader, provided by cle
            l.info("Loading binary %s", thing)
            self.filename = thing
            self.loader = cle.Loader(self.filename, **load_options)
        # Step 2: determine its CPU architecture, ideally falling back to CLE's guess
        if isinstance(arch, str):
            self.arch = archinfo.arch_from_id(arch) # may raise ArchError, let the user see this
        elif isinstance(arch, archinfo.Arch):
            self.arch = arch
        elif arch is None:
            self.arch = self.loader.main_bin.arch
        else:
            raise ValueError("Invalid arch specification.")
        # Step 3: Set some defaults and set the public and private properties
        if not default_analysis_mode:
            default_analysis_mode = 'symbolic'
        if not ignore_functions:
            ignore_functions = []
        if isinstance(exclude_sim_procedures_func, types.LambdaType):
            # NOTE(review): types.LambdaType is the same object as
            # types.FunctionType, so this warns for any plain function, not
            # just lambdas -- confirm that is the intended scope.
            l.warning("Passing a lambda type as the exclude_sim_procedures_func argument to Project causes the resulting object to be un-serializable.")
        # addr -> (SimProcedure class or callable wrapper, kwargs); see hook()
        self._sim_procedures = {}
        self._default_analysis_mode = default_analysis_mode
        self._exclude_sim_procedures_func = exclude_sim_procedures_func
        self._exclude_sim_procedures_list = exclude_sim_procedures_list
        self._should_use_sim_procedures = use_sim_procedures
        self._support_selfmodifying_code = support_selfmodifying_code
        self._ignore_functions = ignore_functions
        # Two synthetic CLE objects: one provides pseudo-addresses for hooked
        # externs, the other for syscalls.
        self._extern_obj = AngrExternObject(self.arch)
        self._extern_obj.provides = 'angr externs'
        self.loader.add_object(self._extern_obj)
        self._syscall_obj = AngrExternObject(self.arch)
        self._syscall_obj.provides = 'angr syscalls'
        self.loader.add_object(self._syscall_obj)
        self._cfg = None
        self._vfg = None
        self._cdg = None
        self.entry = self.loader.main_bin.entry
        self.factory = AngrObjectFactory(self, translation_cache=translation_cache)
        self.analyses = Analyses(self)
        self.surveyors = Surveyors(self)
        self.kb = KnowledgeBase(self, self.loader.main_bin)
        # Register in the module-level weak registry so
        # fake_project_unpickler can find this project again.
        if self.filename is not None:
            projects[self.filename] = self
        # Step 5: determine the host OS and perform additional initialization
        # in the SimOS constructor
        # NOTE(review): "Step 5" runs before "Step 4" below -- the SimOS must
        # exist before simprocedures are hooked (hook_symbol calls into
        # self._simos). Confirm the numbering is merely historical.
        if isinstance(simos, type) and issubclass(simos, SimOS):
            self._simos = simos(self) #pylint:disable=invalid-name
        elif simos is None:
            self._simos = os_mapping[self.loader.main_bin.os](self)
        else:
            raise ValueError("Invalid OS specification or non-matching architecture.")
        # Step 4: Register simprocedures as appropriate for library functions
        self._use_sim_procedures()
        self._simos.configure_project()
    def _use_sim_procedures(self):
        """
        This is all the automatic simprocedure related initialization work
        It's too big to just get pasted into the initializer.
        """
        # Step 1: Get the appropriate libraries of SimProcedures from simuvex
        libs = []
        for lib_name in self.loader.requested_objects:
            if isinstance(self.loader.main_bin, cle.backends.pe.PE):
                # File names are case-insensitive on Windows. Make them all lowercase
                lib_name = lib_name.lower()
            # Hack that should go somewhere else:
            if lib_name in [ 'libc.so.0', 'libc.so' ]:
                lib_name = 'libc.so.6'
            if lib_name == 'ld-uClibc.so.0':
                lib_name = 'ld-uClibc.so.6'
            if lib_name not in simuvex.procedures.SimProcedures:
                l.debug("There are no simprocedures for library %s :(", lib_name)
            else:
                libs.append(lib_name)
        # Step 2: Categorize every "import" symbol in each object.
        # If it's IGNORED, mark it for stubbing
        # If it's blacklisted, don't process it
        # If it matches a simprocedure we have, replace it
        already_resolved = set()
        for obj in self.loader.all_objects:
            unresolved = []
            for reloc in obj.imports.itervalues():
                func = reloc.symbol
                if func.name in already_resolved:
                    continue
                if not func.is_function:
                    continue
                elif func.name in self._ignore_functions:
                    # User asked for this import to be stubbed, not executed.
                    unresolved.append(func)
                    continue
                elif self._should_exclude_sim_procedure(func.name):
                    continue
                elif self._should_use_sim_procedures:
                    for lib in libs:
                        simfuncs = simuvex.procedures.SimProcedures[lib]
                        if func.name in simfuncs:
                            l.info("Providing %s from %s with SimProcedure", func.name, lib)
                            self.hook_symbol(func.name, simfuncs[func.name])
                            already_resolved.add(func.name)
                            break
                    else: # we could not find a simprocedure for this function
                        if not func.resolved:   # the loader couldn't find one either
                            unresolved.append(func)
                # in the case that simprocedures are off and an object in the PLT goes
                # unresolved, we still want to replace it with a retunconstrained.
                elif not func.resolved and func.name in obj.jmprel:
                    unresolved.append(func)
            # Step 3: Stub out unresolved symbols
            # This is in the form of a SimProcedure that either doesn't return
            # or returns an unconstrained value
            for func in unresolved:
                # Don't touch weakly bound symbols, they are allowed to go unresolved
                if func.is_weak:
                    continue
                l.info("[U] %s", func.name)
                procedure = simuvex.SimProcedures['stubs']['NoReturnUnconstrained']
                if func.name not in procedure.use_cases:
                    procedure = simuvex.SimProcedures['stubs']['ReturnUnconstrained']
                self.hook_symbol(func.name, procedure, {'resolves': func.name})
                already_resolved.add(func.name)
    def _should_exclude_sim_procedure(self, f):
        """
        Has symbol name `f` been marked for exclusion by any of the user
        parameters?
        """
        return (f in self._exclude_sim_procedures_list) or \
               ( self._exclude_sim_procedures_func is not None and \
                 self._exclude_sim_procedures_func(f)
               )
    #
    # Public methods
    # They're all related to hooking!
    #
    def hook(self, addr, func, length=0, kwargs=None):
        """
        Hook a section of code with a custom function.
        If `func` is a function, it takes a :class:`SimState` and the given `kwargs`. It can return None, in which case
        it will generate a single exit to the instruction at ``addr+length``, or it can return an array of successor
        states.
        If func is a :class:`SimProcedure`, it will be run instead of a :class:`SimBlock` at that address.
        If `length` is zero the block at the hooked address will be executed immediately after the hook function.
        :param addr: The address to hook.
        :param func: The function that will perform an action when execution reaches the hooked address.
        :param length: How many bytes you'd like to skip over with your hook. Can be zero.
        :param kwargs: Any additional keyword arguments will be passed to your function or your
                       :class:`SimProcedure`'s run function.
        """
        l.debug('hooking %#x with %s', addr, func)
        if kwargs is None: kwargs = {}
        if self.is_hooked(addr):
            # First hook wins; re-hooking requires an explicit unhook() first.
            l.warning("Address is already hooked [hook(%#x, %s, %s()]", addr, func, kwargs.get('funcname', func.__name__))
            return
        if isinstance(func, type):
            # A SimProcedure class: stored as-is and instantiated at runtime.
            proc = func
        elif hasattr(func, '__call__'):
            # A bare callable: wrapped in the UserHook stub SimProcedure.
            proc = simuvex.procedures.stubs.UserHook.UserHook
            kwargs = {
                'user_func': func,
                'user_kwargs': kwargs,
                'default_return_addr': addr+length,
                'length': length,
            }
        else:
            raise AngrError("%s is not a valid object to execute in a hook", func)
        self._sim_procedures[addr] = (proc, kwargs)
    def is_hooked(self, addr):
        """
        Returns True if `addr` is hooked.
        :param addr: An address.
        :returns: True if addr is hooked, False otherwise.
        """
        return addr in self._sim_procedures
    def is_symbol_hooked(self, symbol_name):
        """
        Check if a symbol is already hooked.
        :param str symbol_name: Name of the symbol.
        :return: True if the symbol can be resolved and is hooked, False otherwise.
        :rtype: bool
        """
        ident = self._symbol_name_to_ident(symbol_name)
        # TODO: this method does not follow the SimOS.prepare_function_symbol() path. We should fix it later.
        if not self._extern_obj.contains_identifier(ident):
            return False
        return True
    def hooked_symbol_addr(self, symbol_name):
        """
        Check if a symbol is hooked or not, and if it is hooked, return the address of the symbol.
        :param str symbol_name: Name of the symbol.
        :return: Address of the symbol if it is hooked, None otherwise.
        :rtype: int or None
        """
        if not self.is_symbol_hooked(symbol_name):
            return None
        ident = self._symbol_name_to_ident(symbol_name)
        return self._extern_obj.get_pseudo_addr_for_symbol(ident)
    def unhook(self, addr):
        """
        Remove a hook.
        :param addr: The address of the hook.
        """
        if not self.is_hooked(addr):
            l.warning("Address %#x not hooked", addr)
            return
        del self._sim_procedures[addr]
    def hooked_by(self, addr):
        """
        Returns the current hook for `addr`.
        :param addr: An address.
        :returns: None if the address is not hooked.
        """
        if not self.is_hooked(addr):
            l.warning("Address %#x is not hooked", addr)
            return None
        return self._sim_procedures[addr][0]
    def hook_symbol(self, symbol_name, obj, kwargs=None):
        """
        Resolve a dependency in a binary. Uses the "externs object" (project._extern_obj) to provide addresses for
        hook functions.
        :param symbol_name: The name of the dependency to resolve.
        :param obj: The thing with which to satisfy the dependency. May be a SimProcedure class or a python
                    function (as an appropriate argument to hook()), or a python integer/long.
        :param kwargs: Any additional keyword arguments will be passed to the SimProcedure's run() method.
        :returns: The pseudo address of this new symbol.
        :rtype: int
        """
        if kwargs is None: kwargs = {}
        ident = self._symbol_name_to_ident(symbol_name, kwargs)
        if not isinstance(obj, (int, long)):
            # SimProcedure or callable: allocate a pseudo address and hook it.
            pseudo_addr = self._simos.prepare_function_symbol(ident)
            pseudo_vaddr = pseudo_addr - self._extern_obj.rebase_addr
            if self.is_hooked(pseudo_addr):
                l.warning("Re-hooking symbol " + symbol_name)
                self.unhook(pseudo_addr)
            self.hook(pseudo_addr, obj, kwargs=kwargs)
        else:
            # This is pretty intensely sketchy
            # An integer: the caller supplies the target address directly.
            pseudo_addr = obj
            pseudo_vaddr = obj - self._extern_obj.rebase_addr
        self.loader.provide_symbol(self._extern_obj, symbol_name, pseudo_vaddr)
        return pseudo_addr
    #
    # Private methods related to hooking
    #
    @staticmethod
    def _symbol_name_to_ident(symbol_name, kwargs=None):
        """
        Convert a symbol name to an identifier that are used by hooking.
        :param str symbol_name: Name of the symbol.
        :param dict kwargs: Any additional keyword arguments.
        :return: An identifier.
        :rtype: str
        """
        ident = 'symbol hook: ' + symbol_name
        if kwargs and 'resolves' in kwargs:
            # The 'resolves' kwarg disambiguates stubs that stand in for a
            # specific unresolved import.
            ident += '.' + kwargs['resolves']
        return ident
    #
    # Pickling
    #
    def __getstate__(self):
        """Pickle support: temporarily strip the unpicklable helper objects
        (factory/analyses/surveyors), snapshot __dict__, then restore them."""
        try:
            factory, analyses, surveyors = self.factory, self.analyses, self.surveyors
            self.factory, self.analyses, self.surveyors = None, None, None
            return dict(self.__dict__)
        finally:
            self.factory, self.analyses, self.surveyors = factory, analyses, surveyors
    def __setstate__(self, s):
        """Unpickle support: restore attributes and rebuild the helper objects
        that __getstate__ stripped out."""
        self.__dict__.update(s)
        self.factory = AngrObjectFactory(self)
        self.analyses = Analyses(self)
        self.surveyors = Surveyors(self)
from .errors import AngrError
from .factory import AngrObjectFactory
from .simos import SimOS, os_mapping
from .extern_obj import AngrExternObject
from .analysis import Analyses
from .surveyor import Surveyors
from .knowledge_base import KnowledgeBase
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,421
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/knowledge_base.py
|
"""Representing the artifacts of a project."""
from .knowledge.data import Data
from .knowledge.function_manager import FunctionManager
class KnowledgeBase(object):
    """A store for everything angr has learned about a single binary object.

    Holds recovered data references and functions, plus bookkeeping for
    indirect-jump resolution.
    """
    def __init__(self, project, obj):
        self._project = project
        self.obj = obj
        # Artifact stores, populated by analyses as they run.
        self.data = Data(self)
        self.functions = FunctionManager(self)
        # Indirect jumps, partitioned by whether resolution succeeded.
        self._unresolved_indirect_jumps = set()
        self._resolved_indirect_jumps = set()

    @property
    def callgraph(self):
        """The call graph maintained by the function manager."""
        return self.functions.callgraph
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,422
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/surveyors/executor.py
|
import logging
from ..surveyor import Surveyor
l = logging.getLogger("angr.surveyors.executor")
class Executor(Surveyor):
    """
    A surveyor for purely concrete execution: at most one path is ever
    active, and any state split is treated as a fatal error.
    """

    def __init__(self, project, start, final_addr=None,
                 pickle_paths=None, max_run=50000):
        Surveyor.__init__(self, project, start=start, pickle_paths=pickle_paths)
        self._project = project
        self._final_addr = final_addr          # stop address, if any
        self._max_run = max_run                # budget of basic-block runs
        self._done = False
        self._error_occured = False
        self._run_counter = 0
        self.found = []                        # paths that reached final_addr

    @property
    def done(self):
        # Any error (lost path or exhausted run budget) ends the survey.
        if self.error_occured:
            return True
        active_count = len(self.active)
        if active_count > 1:
            raise Exception("We have more than one path in concrete mode."
                            " Something is wrong.")
        if active_count == 0:
            return True
        # Exactly one active path: check whether it reached the target.
        path = self.active[0]
        if path.state is not None and \
                path.state.se.is_true(path.state.ip == self._final_addr):
            self.found.append(path)
            self.active = []
            return True
        return False

    @property
    def error_occured(self):
        # The single concrete path died before reaching the target...
        if len(self.active) == 0:
            return True
        # ...or we burned through the whole run budget.
        if self._run_counter > self._max_run:
            return True
        return False

    @property
    def last_state(self):
        # Note: reading `done` may archive a target-reaching path.
        return None if self.done or self.error_occured else self.active[0].state

    def tick(self):
        self._run_counter += 1
        Surveyor.tick(self)
        if self.active:
            l.debug("Ran %d run, %s is active...", self._run_counter, self.active[0].previous_run)
        else:
            l.debug("Ran %d run, no more actives...", self._run_counter)

    def __repr__(self):
        return "%d active, %d spilled, %d found, %d deadended, %d errored, %d unconstrained" % (
            len(self.active), len(self.spilled), len(self.found),
            len(self.deadended), len(self.errored), len(self.unconstrained))
# Register this surveyor so it can be looked up by name.
from . import all_surveyors
all_surveyors['Executor'] = Executor
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,423
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_self_modifying_code.py
|
import angr
from simuvex import o
import claripy
import nose
import os
# Path to the cross-compiled test binaries checked out beside this repo.
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_self_modifying_code():
    """
    Execute the self-modifying i386 binary with and without unicorn,
    verify both return 65 in ebx, and check the block traces agree.
    """
    project = angr.Project(os.path.join(test_location, 'i386/stuff'))

    def run_to_deadend(options):
        # Step a single path until it splits or dies.
        group = project.factory.path_group(project.factory.entry_state(add_options=options))
        group.step(until=lambda lpg: len(lpg.active) != 1)
        return group

    native = run_to_deadend({o.STRICT_PAGE_ACCESS})
    nose.tools.assert_true(claripy.is_true(native.one_deadended.state.regs.ebx == 65))

    with_unicorn = run_to_deadend({o.STRICT_PAGE_ACCESS} | o.unicorn)
    nose.tools.assert_true(claripy.is_true(with_unicorn.one_deadended.state.regs.ebx == 65))

    # Both executions must follow the identical basic-block trace.
    nose.tools.assert_true(native.one_deadended.addr_trace.hardcopy ==
                           with_unicorn.one_deadended.addr_trace.hardcopy)
# Allow running this test directly as a script.
if __name__ == '__main__':
    test_self_modifying_code()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,424
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/helpers.py
|
#!/usr/bin/env python
import logging
# Module-level logger shared by the helpers below.
l = logging.getLogger("angr.helpers")
def once(f):
    """
    Memoize a method's no-argument result on the instance.

    The first argument-free call runs ``f`` and stores its return value on
    the instance under ``_<name>``; later argument-free calls return the
    cached value without re-running ``f``.  Any call that passes positional
    or keyword arguments bypasses the cache entirely (it neither reads nor
    writes the cached value).

    :param f: the method to wrap.
    :return: the caching wrapper.
    """
    from functools import wraps
    attr = "_" + f.__name__

    # wraps() preserves __name__ (as before) plus __doc__, __module__,
    # __qualname__ and __dict__ for better introspection.
    @wraps(f)
    def func(self, *args, **kwargs):
        if args or kwargs:
            # Arguments supplied: do not consult or update the cache.
            return f(self, *args, **kwargs)
        if hasattr(self, attr):
            return getattr(self, attr)
        result = f(self, *args, **kwargs)
        setattr(self, attr, result)
        return result
    return func
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,425
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_hook.py
|
import nose
import angr
import os
# Directory holding the cross-compiled test binaries.
location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_mips():
    """
    Hook two loop addresses in a MIPS binary; verify both hooks fire 100
    times each and the program's stdout is unchanged.
    """
    main_end = 0x4007D8
    inner_loop = 0x40069C
    outer_loop = 0x40076C

    project = angr.Project(location + '/mips/test_loops')
    hook_log = []

    def record_inner(_):
        # Zero-length hook: just record that the inner loop ran.
        hook_log.append(1)

    def record_outer(state):
        # Replaces 0x14 bytes: log the hit and emit the loop counter to fd 1.
        hook_log.append(2)
        num = state.se.any_int(state.regs.a1)
        string = '%d ' % num
        state.posix.files[1].write(state.se.BVV(string), state.se.BVV(len(string), 32))

    project.hook(inner_loop, record_inner)
    project.hook(outer_loop, record_outer, length=0x14)

    explorer = project.surveyors.Explorer(start=project.factory.path(), find=[main_end])
    explorer.run()

    nose.tools.assert_equal(len(explorer.found), 1)
    nose.tools.assert_equal(explorer.found[0].state.posix.dumps(1),
                            ''.join('%d ' % x for x in xrange(100)) + '\n')
    nose.tools.assert_equal(hook_log, [1]*100 + [2]*100)
# Allow running this test directly as a script.
if __name__ == '__main__':
    test_mips()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,426
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/surveyors/slicecutor.py
|
#!/usr/bin/env python
import logging
# Module-level logger for the slicing surveyor.
l = logging.getLogger("angr.surveyors.slicecutor")
from ..surveyor import Surveyor
from ..errors import AngrExitError
from collections import defaultdict
# pylint: disable=W0212,
#
# HappyGraph is just here for testing. Please ignore it!
#
class HappyGraph(object):
    """
    A stand-in "annotated CFG" used only for testing the Slicecutor.

    It records every (source, destination) jump observed along the given
    path(s) and answers :meth:`should_take_exit` from that record.  In
    non-strict mode unknown jumps are reported as not-taken; in strict mode
    looking up an unrecorded jump raises ``KeyError``.

    :param path: a single path whose jumps should be recorded.
    :param paths: an iterable of such paths; it is NOT mutated.
    :param strict: if True, unknown jumps raise instead of returning False.
    """
    def __init__(self, path=None, paths=None, strict=False):
        if not strict:
            # Unknown jumps default to False instead of raising KeyError.
            self.jumps = defaultdict(lambda: False)
        else:
            self.jumps = { }

        # Fix: work on a copy so the caller's list is never mutated
        # (the original appended `path` to the caller-supplied `paths`).
        all_paths = list(paths) if paths is not None else [ ]
        if path is not None:
            all_paths.append(path)

        for p in all_paths:
            for i in range(len(p.addr_trace) - 1):
                self.jumps[(p.addr_trace[i], p.addr_trace[i+1])] = True
            # The last recorded address jumps to the path's current address.
            self.jumps[(p.addr_trace[-1], p.addr)] = True

        self._merge_points = [ ]

    def filter_path(self, path): # pylint: disable=W0613,R0201,
        # Never filter any path.
        return True

    def should_take_exit(self, src, dst): # pylint: disable=W0613,R0201,
        return self.jumps[(src, dst)]

    def get_whitelisted_statements(self, addr): # pylint: disable=W0613,R0201,
        # None means "no whitelist": execute every statement.
        return None

    def get_last_statement_index(self, addr): # pylint: disable=W0613,R0201,
        return None

    def merge_points(self, path): # pylint: disable=W0613,R0201,
        return self._merge_points

    def path_priority(self, path): # pylint: disable=W0613,R0201,
        # All paths are equally important.
        return 1
class Slicecutor(Surveyor):
    """The Slicecutor is a surveyor that executes provided code slices."""

    def __init__(self, project, annotated_cfg, start=None, targets=None, max_concurrency=None, max_active=None,
                 max_loop_iterations=None, pickle_paths=None, merge_countdown=10):
        """
        :param project: the angr project to execute.
        :param annotated_cfg: the annotated CFG that dictates which exits and
            statements may be executed (the slice).
        :param start: optional starting path(s), forwarded to Surveyor.
        :param targets: addresses at which a path counts as having reached a
            target; defaults to none.
        :param max_concurrency: forwarded to Surveyor.
        :param max_active: forwarded to Surveyor.
        :param max_loop_iterations: if set, paths that loop more than this
            many times are filtered out.
        :param pickle_paths: forwarded to Surveyor.
        :param merge_countdown: number of ticks to wait at a merge point for
            additional candidate paths before performing the merge.
        """
        Surveyor.__init__(self, project, start=start, max_concurrency=max_concurrency, max_active=max_active, pickle_paths=pickle_paths)
        # the loop limiter
        self._max_loop_iterations = max_loop_iterations if max_loop_iterations else None
        # the project we're slicing up!
        self._project = project
        # the annotated cfg to determine what to execute
        self._annotated_cfg = annotated_cfg
        # these are paths that are taking exits that the annotated CFG does not
        # know about
        self.mysteries = [ ]
        # these are paths that we cut due to the slicing
        self.cut = [ ]
        # those that have reached one of our targets
        self.reached_targets = []
        if targets is not None:
            self._targets = targets
        else:
            self._targets = []
        # mergesanity!
        # merge-point address -> paths parked there, plus a per-address tick
        # countdown before the merge actually happens.
        self._merge_candidates = defaultdict(list)
        self._merge_countdowns = { }
        self.merge_countdown = merge_countdown

    def filter_path(self, path):
        """
        Decide whether a path remains active.  Paths are dropped when the
        annotated CFG rejects them, when they exceed the loop-iteration
        limit, or when they are parked to wait at an upcoming merge point.
        """
        l.debug("Checking path %s for filtering...", path)
        if not self._annotated_cfg.filter_path(path):
            l.debug("... %s is cut by AnnoCFG explicitly.", path)
            self.cut.append(self.suspend_path(path))
            return False
        l.debug("... checking loop iteration limit")
        if self._max_loop_iterations is not None and path.detect_loops() > self._max_loop_iterations:
            l.debug("... limit reached")
            return False
        l.debug("... checking if %s should wait for a merge.", path)
        if path.addr in path._upcoming_merge_points:
            l.debug("... it should!")
            # NOTE: _merge_candidates is a defaultdict(list), so this explicit
            # initialization is redundant but harmless.
            if path.addr not in self._merge_candidates:
                self._merge_candidates[path.addr] = [ ]
            self._merge_candidates[path.addr].append(path)
            # (re)arm the countdown for this merge point
            self._merge_countdowns[path.addr] = self.merge_countdown
            return False
        return True

    def tick_path(self, path):
        """
        Step one path and keep only the successors whose exits the annotated
        CFG approves.  Successors the CFG knows nothing about mark the path
        as a "mystery"; rejected exits mark it as "cut".  Returns the list
        of surviving successor paths.
        """
        path._upcoming_merge_points = self._annotated_cfg.merge_points(path)
        path_successors = Surveyor.tick_path(self, path)
        new_paths = [ ]
        mystery = False
        cut = False
        # No new paths if the current path is already the target
        if not path.errored and path.addr in self._targets:
            self.reached_targets.append(self.suspend_path(path))
            return []
        l.debug("%s ticking path %s, last run is %s", self, path, path.previous_run)
        for successor in path_successors:
            dst_addr = successor.addr
            l.debug("... checking exit to 0x%x from %s", dst_addr, path.previous_run)
            try:
                taken = self._annotated_cfg.should_take_exit(path.addr, dst_addr)
            except AngrExitError: # TODO: which exception?
                l.debug("... annotated CFG did not know about it!")
                mystery = True
                continue
            if taken:
                l.debug("... taking the exit.")
                new_paths.append(successor)
                # the else case isn't here, because the path should set errored in this
                # case and we'll catch it below
            else:
                l.debug("... not taking the exit.")
                cut = True
        if mystery: self.mysteries.append(self.suspend_path(path))
        if cut: self.cut.append(self.suspend_path(path))
        return new_paths

    def pre_tick(self):
        """
        Before each tick: push the slice's statement whitelists onto the
        active paths, and perform any merges whose countdown has expired.
        """
        # Set whitelists and last statements
        for p in self.active:
            addr = p.state.se.exactly_n_int(p.state.ip, 1)[0]
            whitelist = self._annotated_cfg.get_whitelisted_statements(addr)
            last_stmt = self._annotated_cfg.get_last_statement_index(addr)
            p.stmt_whitelist = whitelist
            p.last_stmt = last_stmt
        done_addrs = [ ]
        # NOTE(review): Python 2 iteration; values are mutated but no keys
        # are added/removed during the loop, so this is safe.
        for addr, count in self._merge_countdowns.iteritems():
            l.debug("Checking merge point 0x%x with countdown %d.", addr, count)
            if count == 0:
                to_merge = self._merge_candidates[addr]
                l.debug("... merging %d paths!", len(to_merge))
                if len(to_merge) > 1:
                    new_path = to_merge[0].merge(*(to_merge[1:]))
                else:
                    # only one candidate showed up; no merge needed
                    new_path = to_merge[0]
                # account for the ticks spent waiting at the merge point
                new_path.extra_length += self.merge_countdown
                done_addrs.append(addr)
                self.active.append(new_path)
            else:
                self._merge_countdowns[addr] -= 1
        # clean up the bookkeeping for completed merges
        for d in done_addrs:
            del self._merge_candidates[d]
            del self._merge_countdowns[d]

    @property
    def done(self):
        # Finished only when nothing is active and no merge is pending.
        return (len(self.active) + len(self._merge_countdowns)) == 0

    def _step_path(self, p): #pylint:disable=no-self-use
        # Step the path under the slice's statement whitelist.
        p.step(stmt_whitelist=p.stmt_whitelist, last_stmt=p.last_stmt)

    def path_comparator(self, a, b):
        # Prefer longer weighted paths; tie-break on how often the current
        # block has already been visited (fewer repeats first).
        if a.weighted_length != b.weighted_length:
            return b.weighted_length - a.weighted_length
        return a.addr_trace.count(a.addr_trace[-1]) - b.addr_trace.count(b.addr_trace[-1])
        #return self._annotated_cfg.path_priority(a) - self._annotated_cfg.path_priority(b)

    def __repr__(self):
        return "<Slicecutor with paths: %s, %d cut, %d mysteries, %d reached targets, %d waiting to merge>" % (Surveyor.__repr__(self), len(self.cut), len(self.mysteries), len(self.reached_targets), sum(len(i) for i in self._merge_candidates.values()))
# Register this surveyor so it can be looked up by name.
from . import all_surveyors
all_surveyors['Slicecutor'] = Slicecutor
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,427
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/broken_variableseekr.py
|
#!/usr/bin/env python
import logging
l = logging.getLogger("angr.tests")
import nose
import angr, simuvex
from angr import AngrError
# load the tests
import os
# Directory containing this test file; binaries live in ./blob/ beneath it.
test_location = str(os.path.dirname(os.path.realpath(__file__)))
# One project table per test binary, keyed by architecture name.
projects = {}
projects['fauxwares'] = {}
projects['cfg_1'] = {}
projects['allcmps'] = {}
projects['basic_buffer_overflows'] = {}
projects['uninitialized_reads'] = {}
def setup_x86():
    """Load the 32-bit x86 fauxware binary into its project table."""
    projects['fauxwares']['x86'] = angr.Project(test_location + "/blob/i386/fauxware", arch="X86")
def setup_amd64():
    """Load the AMD64 test binaries (fauxware, cfg_1, allcmps, buffer
    overflow and uninitialized-read samples) into their project tables."""
    projects['fauxwares']['amd64'] = angr.Project(test_location + "/blob/x86_64/fauxware", arch="AMD64")
    projects['cfg_1']['amd64'] = angr.Project(test_location + "/blob/x86_64/cfg_1", arch="AMD64")
    projects['allcmps']['amd64'] = angr.Project(test_location + '/blob/x86_64/allcmps', arch="AMD64")
    projects['basic_buffer_overflows']['amd64'] = angr.Project(test_location + '/blob/x86_64/basic_buffer_overflows')
    projects['uninitialized_reads']['amd64'] = angr.Project(test_location + '/blob/x86_64/uninitialized_reads')
def setup_ppc32():
    """Load the PPC32 fauxware binary into its project table."""
    projects['fauxwares']['ppc32'] = angr.Project(test_location + "/blob/ppc/fauxware", arch="PPC32")
def setup_mipsel():
    """Load the little-endian MIPS fauxware binary into its project table."""
    projects['fauxwares']['mipsel'] = angr.Project(test_location + "/blob/mipsel/fauxware", arch=simuvex.SimMIPS32(endness="Iend_LE"))
def setup_arm():
    """Load the little-endian ARM fauxware binary into its project table."""
    projects['fauxwares']['arm'] = angr.Project(test_location + "/blob/armel/fauxware", arch=simuvex.SimARM(endness="Iend_LE"))
def setup_module():
    """Nose module-level setup: load the binaries for every architecture."""
    # Same invocation order as before.
    for loader in (setup_x86, setup_amd64, setup_arm, setup_ppc32, setup_mipsel):
        loader()
def test_fauxware(arch, start):
    """Build CFG/VFG over the fauxware binary and log every variable the
    VariableSeekr recovers per function."""
    project = projects['fauxwares'][arch]
    cfg = project.analyses.CFG()
    vfg = project.analyses.VFG(start=start)
    seekr = angr.VariableSeekr(project, cfg, vfg)
    seekr.construct(func_start=start)
    for func_addr, _ in cfg.function_manager.functions.items():
        l.info("Function %08xh", func_addr)
        var_manager = seekr.get_variable_manager(func_addr)
        if var_manager is None:
            continue
        # TODO: Check the result returned
        l.info("Variables: ")
        for var in var_manager.variables:
            if isinstance(var, angr.StackVariable):
                l.info(var.detail_str())
            else:
                l.info("%s(%d), referenced at %08x", var, var._size, var._inst_addr)
def test_cfg_1(arch, start):
    """Run CFG + VFG variable recovery on cfg_1 for one architecture and dump variables."""
    proj = projects['cfg_1'][arch]
    cfg = proj.analyses.CFG()
    vfg = proj.analyses.VFG(start=start)
    seekr = angr.VariableSeekr(proj, cfg, vfg)
    seekr.construct(func_start=start)
    for func_addr in cfg.function_manager.functions:
        l.info("Function %08xh", func_addr)
        var_mgr = seekr.get_variable_manager(func_addr)
        if var_mgr is None:
            continue
        # TODO: Check the result returned
        l.info("Variables: ")
        for var in var_mgr.variables:
            if isinstance(var, angr.StackVariable):
                l.info(var.detail_str())
            else:
                l.info("%s(%d), referenced at %08x", var, var._size, var._inst_addr)
def test_allcmps(arch, starts):
    """Variable recovery over allcmps: build one VFG per entry point, then seek variables."""
    proj = projects['allcmps'][arch]
    cfg = proj.analyses.CFG()
    for entry in starts:
        proj.analyses.VFG(start=entry)
    seekr = angr.VariableSeekr(proj, cfg, proj.vfg)
    for entry in starts:
        seekr.construct(func_start=entry)
    for func_addr in cfg.function_manager.functions:
        l.info("Function %xh", func_addr)
        var_mgr = seekr.get_variable_manager(func_addr)
        if var_mgr is None:
            continue
        # TODO: Check the result returned
        l.info("Variables: ")
        for var in var_mgr.variables:
            if isinstance(var, angr.StackVariable):
                l.info(var.detail_str())
            else:
                l.info("%s(%d), referenced at %08x", var, var._size, var._inst_addr)
def test_basic_buffer_overflows(arch, starts):
    """Variable recovery over basic_buffer_overflows: one VFG per entry, then seek variables."""
    proj = projects['basic_buffer_overflows'][arch]
    cfg = proj.analyses.CFG()
    for entry in starts:
        proj.analyses.VFG(start=entry)
    seekr = angr.VariableSeekr(proj, cfg, proj.vfg)
    for entry in starts:
        seekr.construct(func_start=entry)
    for func_addr in cfg.function_manager.functions:
        l.info("Function %xh", func_addr)
        var_mgr = seekr.get_variable_manager(func_addr)
        if var_mgr is None:
            continue
        # TODO: Check the result returned
        l.info("Variables: ")
        for var in var_mgr.variables:
            if isinstance(var, angr.StackVariable):
                l.info(var.detail_str())
            else:
                l.info("%s(%d), referenced at %08x", var, var._size, var._inst_addr)
def test_uninitialized_reads(arch, starts):
    """
    Run variable recovery over the uninitialized_reads binary.

    :param arch:   Key into the per-arch project table (e.g. 'amd64').
    :param starts: Iterable of function entry addresses to analyze.
    """
    uninitialized_reads = projects['uninitialized_reads']
    cfg = uninitialized_reads[arch].analyses.CFG()
    for start in starts:
        uninitialized_reads[arch].analyses.VFG(start=start)
    vfg = uninitialized_reads[arch].vfg
    variable_seekr = angr.VariableSeekr(uninitialized_reads[arch], cfg, vfg)
    for start in starts:
        try:
            variable_seekr.construct(func_start=start)
        except AngrError:
            # Best-effort: some entry points are known to trip the seeker;
            # log and continue with the remaining start addresses.
            l.info('AngrError...')
            continue
    function_manager = cfg.function_manager
    for func_addr, _ in function_manager.functions.items():
        l.info("Function %xh", func_addr)
        variable_manager = variable_seekr.get_variable_manager(func_addr)
        if variable_manager is None:
            continue
        # TODO: Check the result returned
        l.info("Variables: ")
        for var in variable_manager.variables:
            if isinstance(var, angr.StackVariable):
                l.info(var.detail_str())
            else:
                l.info("%s(%d), referenced at %08x", var, var._size, var._inst_addr)
    # BUG FIX: removed a stray `import ipdb; ipdb.set_trace()` debugger
    # breakpoint that had been left behind at module level.
if __name__ == "__main__":
    # Optionally pull in developer logging helpers; their absence is fine.
    try:
        __import__('standard_logging')
        __import__('angr_debug')
    except ImportError:
        pass
    # Crank up verbosity for the analyses exercised below.
    logging.getLogger('angr.cfg').setLevel(logging.DEBUG)
    logging.getLogger('angr.vfg').setLevel(logging.DEBUG)
    logging.getLogger('simuvex.plugins.symbolic_memory').setLevel(logging.INFO)
    #logging.getLogger('simuvex.plugins.abstract_memory').setLevel(logging.DEBUG)
    logging.getLogger('claripy.claripy').setLevel(logging.ERROR)
    l.setLevel(logging.DEBUG)
    # Direct runs only load the amd64 binaries; the other setup_* helpers
    # are exercised via setup_module() under a test runner.
    setup_amd64()
    l.info("LOADED")
    #test_fauxware('amd64', 0x40071d)
    #test_basic_buffer_overflows('amd64', (0x40068f, 0x40055c, 0x4005b6, 0x40063e))
    test_uninitialized_reads('amd64', (0x40052c, 0x40056c))
    l.info("DONE")
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,428
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_argc_sym.py
|
import angr
import claripy
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def _verify_results(pg, sargc, length=400):
argcs = pg.mp_found.state.se.any_int(sargc)
strs = pg.mp_found.state.se.any_str(pg.mp_found.state.memory.load(pg.mp_found.state.regs.sp, length))
for a,s in zip(argcs.mp_items, strs.mp_items):
assert a in (0,1,2)
assert "Good man" in s if a == 1 else "Very Good man" if a == 2 else True
def test_mips():
    """Symbolic-argc exploration of the big-endian MIPS binary."""
    proj = angr.Project(test_location + "/mips/argc_symbol")
    targets = [0x400720, 0x40076c, 0x4007bc]
    sargc = claripy.BVS('argc', 32)
    argv = [claripy.BVS('arg_0', 40 * 8),
            claripy.BVS('arg_1', 40 * 8),
            claripy.BVS('arg_2', 40 * 8)]
    start = proj.factory.path(args=argv, env={"HOME": "/home/angr"}, argc=sargc)
    pg = proj.factory.path_group(start).explore(find=targets, num_find=100)
    _verify_results(pg, sargc)
def test_mipsel():
    """Symbolic-argc exploration of the little-endian MIPS binary."""
    proj = angr.Project(test_location + "/mipsel/argc_symbol")
    targets = [0x400720, 0x40076c, 0x4007bc]
    sargc = claripy.BVS('argc', 32)
    argv = [claripy.BVS('arg_0', 40 * 8),
            claripy.BVS('arg_1', 40 * 8),
            claripy.BVS('arg_2', 40 * 8)]
    start = proj.factory.path(args=argv, env={"HOME": "/home/angr"}, argc=sargc)
    pg = proj.factory.path_group(start).explore(find=targets, num_find=100)
    _verify_results(pg, sargc)
def test_i386():
    """Symbolic-argc exploration of the i386 binary."""
    proj = angr.Project(test_location + "/i386/argc_symbol")
    targets = [0x08048411, 0x08048437, 0x08048460]
    sargc = claripy.BVS('argc', 32)
    argv = [claripy.BVS('arg_0', 40 * 8),
            claripy.BVS('arg_1', 40 * 8),
            claripy.BVS('arg_2', 40 * 8)]
    start = proj.factory.path(args=argv, env={"HOME": "/home/angr"}, argc=sargc)
    pg = proj.factory.path_group(start).explore(find=targets, num_find=100)
    _verify_results(pg, sargc)
def test_amd64():
    """Symbolic-argc exploration of the x86_64 binary (64-bit argc, deeper stack check)."""
    proj = angr.Project(test_location + "/x86_64/argc_symbol",
                        load_options={'auto_load_libs': False})
    targets = [0x40051B, 0x400540, 0x400569]
    sargc = claripy.BVS('argc', 64)
    argv = [claripy.BVS('arg_0', 40 * 8),
            claripy.BVS('arg_1', 40 * 8),
            claripy.BVS('arg_2', 40 * 8)]
    start = proj.factory.path(args=argv, env={"HOME": "/home/angr"}, argc=sargc)
    pg = proj.factory.path_group(start).explore(find=targets, num_find=100)
    _verify_results(pg, sargc, length=800)
def test_arm():
    """Symbolic-argc exploration of the armel binary."""
    proj = angr.Project(test_location + "/armel/argc_symbol")
    targets = [0x00010444, 0x00010478, 0x000104B0]
    sargc = claripy.BVS('argc', 32)
    argv = [claripy.BVS('arg_0', 40 * 8),
            claripy.BVS('arg_1', 40 * 8),
            claripy.BVS('arg_2', 40 * 8)]
    start = proj.factory.path(args=argv, env={"HOME": "/home/angr"}, argc=sargc)
    pg = proj.factory.path_group(start).explore(find=targets, num_find=100)
    _verify_results(pg, sargc)
def test_ppc32():
    """Symbolic-argc exploration of the PPC32 binary."""
    proj = angr.Project(test_location + "/ppc/argc_symbol")
    targets = [0x1000043C, 0x10000474, 0x100004B0]
    sargc = claripy.BVS('argc', 32)
    argv = [claripy.BVS('arg_0', 40 * 8),
            claripy.BVS('arg_1', 40 * 8),
            claripy.BVS('arg_2', 40 * 8)]
    start = proj.factory.path(args=argv, env={"HOME": "/home/angr"}, argc=sargc)
    pg = proj.factory.path_group(start).explore(find=targets, num_find=100)
    _verify_results(pg, sargc)
if __name__ == "__main__":
    # Run every architecture's test in the original order.
    for case in (test_mips, test_mipsel, test_arm, test_i386, test_amd64, test_ppc32):
        case()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,429
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_cfg_path.py
|
import angr
import logging
import os
l = logging.getLogger("angr_tests")
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'../../binaries/tests'))
def test_cfg_paths():
    """Smoke-test CFGAccurate path extraction from main to the printf stub."""
    proj = angr.Project(os.path.join(test_location, "x86_64/track_user_input"))
    main_addr = proj.loader.main_bin.get_symbol("main").addr
    printf_addr = 0x400470
    cfg = proj.analyses.CFGAccurate(keep_state=True)
    # Smoke only: the returned paths are not inspected, just extracted.
    paths = cfg.get_paths(main_addr, printf_addr)
if __name__ == '__main__':
    # Allow running this test file directly as a script.
    test_cfg_paths()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,430
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/veritesting.py
|
import logging
from collections import defaultdict

import networkx

from simuvex import SimProcedures, o
# Added: exception types referenced below (filter(), _veritesting(),
# is_path_errored()) but previously never imported, causing NameError
# in those except clauses when they fired.
from simuvex import SimError, SimValueError, SimSolverModeError

from ..errors import AngrError, AngrCFGError
from ..analysis import Analysis, register_analysis
from ..path_group import PathGroup
from ..path import Path, AngrPathError
l = logging.getLogger('angr.analyses.veritesting')
class VeritestingError(Exception):
    """Internal error raised and caught by the Veritesting analysis."""
    pass
class CallTracingFilter(object):
    # Decides, during CFG construction, whether a call target should be traced
    # into or skipped. Veritesting uses it to keep static exploration tractable.

    # SimProcedures that are always safe to step into.
    whitelist = {
        SimProcedures['cgc']['receive'],
        SimProcedures['cgc']['transmit'],
        SimProcedures['libc.so.6']['read'],
    }

    # Class-level cache shared by all instances:
    # (addr, jumpkind) -> (CFG, CallTracingFilter)
    cfg_cache = { }

    def __init__(self, project, depth, blacklist=None):
        # depth counts how many nested filters were created above this one;
        # filter() rejects once depth reaches 5.
        self.project = project
        self.blacklist = [ ] if blacklist is None else blacklist
        # Addresses (or -1 for unconcretizable targets) that were rejected.
        self._skipped_targets = set()
        self.depth = depth

    def filter(self, call_target_state, jumpkind):
        """
        The call will be skipped if it returns True.

        :param call_target_state: The new state of the call target.
        :param jumpkind: The Jumpkind of this call.
        :returns: True if we want to skip this call, False otherwise.
        """
        # Named for readability: the CFG's call_tracing_filter contract is
        # "True means skip".
        ACCEPT = False
        REJECT = True

        l.debug('Filtering calling target %s', call_target_state.ip)

        # Currently we always skip the call, unless the target function satisfies one of the following conditions:
        # 1) It's a SimProcedure that are in the whitelist
        # 2) It's a function that has no loops, and no calls/syscalls,
        # 3) It's a function that has no loops, and only has calls to another function that will not be filtered out by
        #    this filter

        # Generate a CFG
        ip = call_target_state.ip

        if self.depth >= 5:
            l.debug('Rejecting target %s - too deep, depth is %d', ip, self.depth)
            return REJECT

        try:
            addr = call_target_state.se.exactly_int(ip)
        # NOTE(review): SimValueError/SimSolverModeError must be importable in
        # this module for the handler itself not to raise NameError - confirm
        # the module header imports them.
        except (SimValueError, SimSolverModeError):
            self._skipped_targets.add(-1)
            l.debug('Rejecting target %s - cannot be concretized', ip)
            return REJECT

        # Is it in our blacklist?
        if addr in self.blacklist:
            self._skipped_targets.add(addr)
            l.debug('Rejecting target 0x%x - blacklisted', addr)
            return REJECT

        # If the target is a SimProcedure, is it on our whitelist?
        if self.project.is_hooked(addr) and type(self.project._sim_procedures[addr][0]) in CallTracingFilter.whitelist:
            # accept!
            l.debug('Accepting target 0x%x, jumpkind %s', addr, jumpkind)
            return ACCEPT

        # If it's a syscall, let's see if the real syscall is inside our whitelist
        if jumpkind.startswith('Ijk_Sys'):
            # Single-step a throwaway path so next_run resolves to the actual
            # syscall SimProcedure.
            call_target_state.scratch.jumpkind = jumpkind
            tmp_path = self.project.factory.path(call_target_state)
            tmp_path.step()
            next_run = tmp_path.next_run
            if type(next_run) in CallTracingFilter.whitelist:
                # accept!
                l.debug('Accepting target 0x%x, jumpkind %s', addr, jumpkind)
                return ACCEPT
            else:
                # reject
                l.debug('Rejecting target 0x%x - syscall %s not in whitelist', addr, type(next_run))
                return REJECT

        cfg_key = (addr, jumpkind)
        if cfg_key not in self.cfg_cache:
            # Recursively build a depth-limited CFG of the callee; the nested
            # filter blacklists this address to avoid re-entering it.
            new_blacklist = self.blacklist[ :: ]
            new_blacklist.append(addr)
            tracing_filter = CallTracingFilter(self.project, depth=self.depth + 1, blacklist=new_blacklist)
            cfg = self.project.analyses.CFGAccurate(starts=((addr, jumpkind),),
                                                    initial_state=call_target_state,
                                                    context_sensitivity_level=0,
                                                    call_depth=1,
                                                    call_tracing_filter=tracing_filter.filter
                                                    )
            self.cfg_cache[cfg_key] = (cfg, tracing_filter)

            try:
                cfg.unroll_loops(1)
            except AngrCFGError:
                # Exceptions occurred during loop unrolling
                # reject
                l.debug('Rejecting target 0x%x - loop unrolling failed', addr)
                return REJECT

        else:
            l.debug('Loading CFG from CFG cache')
            cfg, tracing_filter = self.cfg_cache[cfg_key]

        if cfg._loop_back_edges:
            # It has loops!
            self._skipped_targets.add(addr)
            l.debug('Rejecting target 0x%x - it has loops', addr)
            return REJECT

        # Reject callees that reach any SimProcedure outside the whitelist.
        sim_procedures = [ n for n in cfg.graph.nodes() if n.simprocedure_name is not None ]
        for sp_node in sim_procedures:
            if not self.project.is_hooked(sp_node.addr):
                # This is probably a PathTerminator
                # Just skip it for now
                continue

            if self.project._sim_procedures[sp_node.addr][0] not in CallTracingFilter.whitelist:
                self._skipped_targets.add(addr)
                l.debug('Rejecting target 0x%x - contains SimProcedures outside whitelist', addr)
                return REJECT

        # If the nested filter skipped anything inside the callee, the callee's
        # CFG is incomplete, so we cannot safely trace into it either.
        if len(tracing_filter._skipped_targets):
            # Bummer
            self._skipped_targets.add(addr)
            l.debug('Rejecting target 0x%x - should be skipped', addr)
            return REJECT

        # accept!
        l.debug('Accepting target 0x%x, jumpkind %s', addr, jumpkind)
        return ACCEPT
class Veritesting(Analysis):
# A cache for CFG we generated before
cfg_cache = { }
# Names of all stashes we will return from Veritesting
all_stashes = ('successful', 'errored', 'deadended', 'deviated', 'unconstrained')
    def __init__(
        self, input_path, boundaries=None, loop_unrolling_limit=10, enable_function_inlining=False,
        terminator=None, deviation_filter=None, path_callback=None
    ):
        """
        SSE stands for Static Symbolic Execution, and we also implemented an extended version of Veritesting (Avgerinos,
        Thanassis, et al, ICSE 2014).

        :param input_path: The initial path to begin the execution with.
        :param boundaries: Addresses where execution should stop.
        :param loop_unrolling_limit: The maximum times that Veritesting should unroll a loop for.
        :param enable_function_inlining: Whether we should enable function inlining and syscall inlining.
        :param terminator: A callback function that takes a path as parameter. Veritesting will terminate
                           if this function returns True.
        :param deviation_filter: A callback function that takes a path as parameter. Veritesting will put the
                                 path into "deviated" stash if this function returns True.
        :param path_callback: A callback function that takes a path as parameter. Veritesting will call this
                              function on every single path after their next_run is created.
        """
        # Copy so the caller's path is never mutated by the analysis.
        self._input_path = input_path.copy()
        self._boundaries = boundaries if boundaries is not None else [ ]
        self._loop_unrolling_limit = loop_unrolling_limit
        self._enable_function_inlining = enable_function_inlining
        self._terminator = terminator
        self._deviation_filter = deviation_filter
        self._path_callback = path_callback

        # set up the cfg stuff
        self._cfg, self._loop_graph = self._make_cfg()
        self._loop_backedges = self._cfg._loop_back_edges
        # Addresses that a loop back-edge jumps to, i.e. loop entry points.
        self._loop_heads = set([ dst.addr for _, dst in self._loop_backedges ])

        l.info("Static symbolic execution starts at 0x%x", self._input_path.addr)
        l.debug(
            "The execution will terminate at the following addresses: [ %s ]",
            ", ".join([ hex(i) for i in self._boundaries ])
        )
        l.debug("A loop will be unrolled by a maximum of %d times.", self._loop_unrolling_limit)
        if self._enable_function_inlining:
            l.debug("Function inlining is enabled.")
        else:
            l.debug("Function inlining is disabled.")

        # The analysis runs to completion inside the constructor; callers read
        # self.result and self.final_path_group afterwards.
        self.result, self.final_path_group = self._veritesting()
def _veritesting(self):
"""
Perform static symbolic execution starting from the given point.
"""
p = self._input_path.copy()
try:
new_path_group = self._execute_and_merge(p)
except (ClaripyError, SimError, AngrError):
if not BYPASS_VERITESTING_EXCEPTIONS in p.state.options:
raise
else:
l.warning("Veritesting caught an exception.", exc_info=True)
return False, PathGroup(self.project, stashes={'deviated', p})
except VeritestingError as ex:
l.warning("Exception occurred: %s", str(ex))
return False, PathGroup(self.project, stashes={'deviated', p})
l.info(
'Returning new paths: (successful: %s, deadended: %s, errored: %s, deviated: %s)',
len(new_path_group.successful), len(new_path_group.deadended),
len(new_path_group.errored), len(new_path_group.deviated)
)
return True, new_path_group
def _execute_and_merge(self, path):
"""
Symbolically execute the program in a static manner. The basic idea is that we look ahead by creating a CFG,
then perform a _controlled symbolic exploration_ based on the CFG, one path at a time. The controlled symbolic
exploration stops when it sees a branch whose both directions are all feasible, or it shall wait for a merge
from another path.
A basic block will not be executed for more than *loop_unrolling_limit* times. If that is the case, a new state
will be returned.
:param path: The initial path to start the execution.
:returns: A list of new states.
"""
# Remove path._run
path._run = None
# Find all merge points
merge_points = self._get_all_merge_points(self._cfg, self._loop_graph)
l.debug('Merge points: %s', [ hex(i[0]) for i in merge_points ])
#
# Controlled symbolic exploration
#
# Initialize the beginning path
initial_path = path
initial_path.info['loop_ctrs'] = defaultdict(int)
path_group = PathGroup(
self.project,
active_paths=[ initial_path ],
immutable=False,
resilience=o.BYPASS_VERITESTING_EXCEPTIONS in initial_path.state.options
)
# Initialize all stashes
for stash in self.all_stashes:
path_group.stashes[stash] = [ ]
# immediate_dominators = cfg.immediate_dominators(cfg.get_any_node(ip_int))
while path_group.active:
# Step one step forward
l.debug('Steps %s with %d active paths: [ %s ]',
path_group,
len(path_group.active),
path_group.active)
# Apply self.deviation_func on every single active path, and move them to deviated stash if needed
if self._deviation_filter is not None:
path_group.stash(filter_func=self._deviation_filter, from_stash='active', to_stash='deviated')
# Mark all those paths that are out of boundaries as successful
path_group.stash(
filter_func=self.is_path_overbound,
from_stash='active', to_stash='successful'
)
path_group.step(
successor_func=lambda p: self.generate_successors(p, path_group),
check_func=self.is_path_errored
)
if self._terminator is not None and self._terminator(path_group):
for p in path_group.unfuck:
self._unfuck(p)
break
# Stash all paths that we do not see in our CFG
path_group.stash(
filter_func=self._path_not_in_cfg,
to_stash="deviated"
)
# Stash all paths that we do not care about
path_group.stash(
filter_func= lambda p: (
p.state.scratch.jumpkind not in
('Ijk_Boring', 'Ijk_Call', 'Ijk_Ret', 'Ijk_NoHook')
and not p.state.scratch.jumpkind.startswith('Ijk_Sys')
),
to_stash="deadended"
)
if path_group.deadended:
l.debug('Now we have some deadended paths: %s', path_group.deadended)
# Stash all possible paths that we should merge later
for merge_point_addr, merge_point_looping_times in merge_points:
path_group.stash_addr(
merge_point_addr,
to_stash="_merge_%x_%d" % (merge_point_addr, merge_point_looping_times)
)
# Try to merge a set of previously stashed paths, and then unstash them
if not path_group.active:
merged_anything = False
for merge_point_addr, merge_point_looping_times in merge_points:
if merged_anything:
break
stash_name = "_merge_%x_%d" % (merge_point_addr, merge_point_looping_times)
if stash_name not in path_group.stashes:
continue
stash_size = len(path_group.stashes[stash_name])
if stash_size == 0:
continue
if stash_size == 1:
l.info("Skipping merge of 1 path in stash %s.", stash_size)
path_group.move(stash_name, 'active')
continue
# let everyone know of the impending disaster
l.info("Merging %d paths in stash %s", stash_size, stash_name)
# Try to prune the stash, so unsatisfiable paths will be thrown away
path_group.prune(from_stash=stash_name, to_stash='pruned')
if 'pruned' in path_group.stashes and len(path_group.pruned):
l.debug('... pruned %d paths from stash %s', len(path_group.pruned), stash_name)
# Remove the pruned stash to save memory
path_group.drop(stash='pruned')
# merge things callstack by callstack
while len(path_group.stashes[stash_name]):
r = path_group.stashes[stash_name][0]
path_group.move(
stash_name, 'merge_tmp',
lambda p: p.callstack == r.callstack #pylint:disable=cell-var-from-loop
)
old_count = len(path_group.merge_tmp)
l.debug("... trying to merge %d paths.", old_count)
# merge the loop_ctrs
new_loop_ctrs = defaultdict(int)
for m in path_group.merge_tmp:
for head_addr, looping_times in m.info['loop_ctrs'].iteritems():
new_loop_ctrs[head_addr] = max(
looping_times,
m.info['loop_ctrs'][head_addr]
)
path_group.merge(stash='merge_tmp')
for m in path_group.merge_tmp:
m.info['loop_ctrs'] = new_loop_ctrs
new_count = len(path_group.stashes['merge_tmp'])
l.debug("... after merge: %d paths.", new_count)
merged_anything |= new_count != old_count
if len(path_group.merge_tmp) > 1:
l.warning("More than 1 path after Veritesting merge.")
path_group.move('merge_tmp', 'active')
elif any(
loop_ctr >= self._loop_unrolling_limit + 1 for loop_ctr in
path_group.one_merge_tmp.info['loop_ctrs'].itervalues()
):
l.debug("... merged path is overlooping")
path_group.move('merge_tmp', 'deadended')
else:
l.debug('... merged path going to active stash')
path_group.move('merge_tmp', 'active')
if any(len(path_group.stashes[stash_name]) for stash_name in self.all_stashes):
# Remove all stashes other than errored or deadended
path_group.stashes = {
name: stash for name, stash in path_group.stashes.items()
if name in self.all_stashes
}
for stash in path_group.stashes:
path_group.apply(self._unfuck, stash=stash)
return path_group
#
# Path management
#
def is_path_errored(self, path):
    """
    Check whether `path` is in an errored state, stepping it one SimRun
    forward if necessary to find out.

    Note that when a bad jumpkind or an exception is detected, this method
    records the error on ``path._error`` but still returns False; only a
    path that is already flagged as errored returns True.
    NOTE(review): confirm the caller inspects ``path._error`` afterwards --
    that appears to be the intended protocol here.

    :param path: The path to check (and possibly step).
    :returns: True if the path was already errored, False otherwise.
    """
    if path.errored:
        return True
    elif len(path.jumpkinds) > 0 and path.jumpkinds[-1] in Path._jk_all_bad:
        l.debug("Errored jumpkind %s", path.jumpkinds[-1])
        path._error = AngrPathError('path has a failure jumpkind of %s' % path.jumpkinds[-1])
    else:
        try:
            if path._run is None:
                ip = path.addr
                # FIXME: cfg._nodes should also be updated when calling cfg.normalize()
                # Cap the step at the size of the next IRSB recorded in our CFG.
                size_of_next_irsb = [ n for n in self._cfg.graph.nodes() if n.addr == ip ][0].size
                path.step(max_size=size_of_next_irsb)
        except (AngrError, SimError, ClaripyError) as ex:
            # Typo fixed: was "caxtching".
            l.debug('is_path_errored(): catching exception %s', ex)
            path._error = ex
        except (TypeError, ValueError, ArithmeticError, MemoryError) as ex:
            l.debug("is_path_errored(): catching exception %s", ex)
            path._error = ex
    return False
def _path_not_in_cfg(self, p):
"""
Returns if p.addr is not a proper node in our CFG.
:param p: The Path instance to test.
:returns: False if our CFG contains p.addr, True otherwise.
"""
n = self._cfg.get_any_node(p.addr, is_syscall=p.jumpkinds[-1].startswith('Ijk_Sys'))
if n is None:
return True
if n.simprocedure_name == 'PathTerminator':
return True
return False
def generate_successors(self, path, path_group):
    """
    Step `path` one SimRun forward and return its successors.

    Unconstrained successors are not returned; they are wrapped in new Path
    objects and appended to the path group's 'unconstrained' stash instead.
    If a path callback is registered, it is invoked with a cleaned-up copy
    of the stepped path.

    :param path: The path to step.
    :param path_group: The PathGroup whose 'unconstrained' stash receives
                       any unconstrained successors.
    :returns: The list of successor paths.
    """
    ip = path.addr
    l.debug("Pushing 0x%x one step forward...", ip)
    # FIXME: cfg._nodes should also be updated when calling cfg.normalize()
    # Cap the step at the size of the next IRSB as recorded in our CFG so
    # stepping stays aligned with the (normalized) CFG nodes.
    size_of_next_irsb = [ n for n in self._cfg.graph.nodes() if n.addr == ip ][0].size
    # It has been called by is_path_errored before, but I'm doing it here anyways. Who knows how the logic in
    # PathGroup will change in the future...
    path.step(max_size=size_of_next_irsb)
    # Now it's safe to call anything that may access Path.next_run
    if self._path_callback:
        # Hand the callback a copy with the Veritesting-private loop
        # counters stripped off, so it sees a "clean" path.
        copied_path = path.copy()
        self._unfuck(copied_path)
        self._path_callback(copied_path)
    successors = path.successors
    # Get all unconstrained successors, and save them out
    if path.next_run:
        for s in path.next_run.unconstrained_successors:
            u_path = Path(self.project, s, path=path)
            path_group.stashes['unconstrained'].append(u_path)
    l.debug("... new successors: %s", successors)
    return successors
def is_path_overbound(self, path):
    """
    Filter out all paths that run out of boundaries or loop too many times.

    :param path: The path to check.
    :returns: True if the path should be terminated, False otherwise.
    """
    addr = path.addr

    # Crossed a boundary Veritesting must not step past?
    if addr in self._boundaries:
        l.debug("... terminating Veritesting due to overbound")
        return True

    hit_loop_head = addr in self._loop_heads      # beginning of a known loop
    is_call = path.jumpkind == 'Ijk_Call'         # recursive calls also count as looping
    if hit_loop_head or is_call:
        counters = path.info['loop_ctrs']
        counters[addr] += 1
        if counters[addr] >= self._loop_unrolling_limit + 1:
            l.debug('... terminating Veritesting due to overlooping')
            return True

    l.debug('... accepted')
    return False
@staticmethod
def _unfuck(p):
del p.info['loop_ctrs']
return p
#
# Merge point determination
#
def _make_cfg(self):
    """
    Builds a CFG from the current function.

    Results are memoized in ``self.cfg_cache``, keyed on
    (address, jumpkind), so repeated runs over the same location reuse
    the previously built CFG.

    :returns: A 2-tuple (cfg, cfg_graph_with_loops), where the second item
              is a copy of the CFG graph taken *before* loops were unrolled.
    :rtype: tuple
    """
    path = self._input_path
    state = path.state
    ip_int = path.addr
    cfg_key = (ip_int, path.jumpkind)
    if cfg_key in self.cfg_cache:
        cfg, cfg_graph_with_loops = self.cfg_cache[cfg_key]
    else:
        if self._enable_function_inlining:
            call_tracing_filter = CallTracingFilter(self.project, depth=0)
            filter = call_tracing_filter.filter #pylint:disable=redefined-builtin
        else:
            filter = None
        # To better handle syscalls, we make a copy of all registers if they are not symbolic
        cfg_initial_state = self.project.factory.blank_state(mode='fastpath')
        # FIXME: This is very hackish
        # FIXME: And now only Linux-like syscalls are supported
        # Copy the syscall-number register (eax/rax) into the CFG's initial
        # state when it is concrete, so syscall targets resolve correctly.
        if self.project.arch.name == 'X86':
            if not state.se.symbolic(state.regs.eax):
                cfg_initial_state.regs.eax = state.regs.eax
        elif self.project.arch.name == 'AMD64':
            if not state.se.symbolic(state.regs.rax):
                cfg_initial_state.regs.rax = state.regs.rax
        cfg = self.project.analyses.CFGAccurate(
            starts=((ip_int, path.jumpkind),),
            context_sensitivity_level=0,
            call_depth=1,
            call_tracing_filter=filter,
            initial_state=cfg_initial_state
        )
        cfg.normalize()
        # Snapshot the graph with its loop edges before unrolling; it is
        # used later for post-dominance queries on the original structure.
        cfg_graph_with_loops = networkx.DiGraph(cfg.graph)
        cfg.unroll_loops(self._loop_unrolling_limit)
        self.cfg_cache[cfg_key] = (cfg, cfg_graph_with_loops)
    return cfg, cfg_graph_with_loops
@staticmethod
def _post_dominate(reversed_graph, n1, n2):
    """
    Checks whether `n1` post-dominates `n2` in the *original* (not reversed) graph.

    :param reversed_graph: The reversed networkx.DiGraph instance.
    :param n1: Node 1.
    :param n2: Node 2.
    :returns: True/False.
    """
    # NOTE(review): networkx.dominating_set() computes a *dominating set*
    # in the graph-theoretic sense, which is not the dominator relation
    # from compiler theory. Confirm this is the intended semantics before
    # relying on it for merge-point ordering.
    ds = networkx.dominating_set(reversed_graph, n1)
    return n2 in ds
def _get_all_merge_points(self, cfg, graph_with_loops):
    """
    Return all possible merge points in this CFG.

    :param cfg: The control flow graph, which must be acyclic.
    :param graph_with_loops: The CFG graph *with* loop edges, used for
                             post-dominance ordering.
    :returns: A list of merge points as (address, looping_times) tuples.
    """
    graph = networkx.DiGraph(cfg.graph)
    reversed_cyclic_graph = networkx.reverse(graph_with_loops, copy=False)
    # Remove all "FakeRet" edges
    fakeret_edges = [
        (src, dst) for src, dst, data in graph.edges_iter(data=True)
        if data['jumpkind'] == 'Ijk_FakeRet'
    ]
    graph.remove_edges_from(fakeret_edges)
    # Remove all "FakeRet" edges from cyclic_graph as well
    fakeret_edges = [
        (src, dst) for src, dst, data in reversed_cyclic_graph.edges_iter(data=True)
        if data['jumpkind'] == 'Ijk_FakeRet'
    ]
    reversed_cyclic_graph.remove_edges_from(fakeret_edges)
    # Perform a topological sort
    sorted_nodes = networkx.topological_sort(graph)
    # Merge-point candidates: join nodes (in-degree > 1) belonging to the
    # first loop iteration only (looping_times == 0).
    nodes = [ n for n in sorted_nodes if graph.in_degree(n) > 1 and n.looping_times == 0 ]
    # Reorder nodes based on post-dominance relations
    # NOTE: sorted(..., cmp=...) is Python 2-only syntax.
    nodes = sorted(nodes, cmp=lambda n1, n2: (
        1 if self._post_dominate(reversed_cyclic_graph, n1, n2)
        else (-1 if self._post_dominate(reversed_cyclic_graph, n2, n1) else 0)
    ))
    return [ (n.addr, n.looping_times) for n in nodes ]
# Make the Veritesting analysis available as project.analyses.Veritesting.
register_analysis(Veritesting, 'Veritesting')

# NOTE(review): these imports live at the bottom of the module, presumably
# to break a circular-import cycle at load time -- confirm before moving
# them to the top of the file.
from simuvex import SimValueError, SimSolverModeError, SimError
from simuvex.s_options import BYPASS_VERITESTING_EXCEPTIONS
from claripy import ClaripyError
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,431
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/knowledge/data.py
|
class Data(object):
    """Thin wrapper holding a reference to a backing model object."""

    def __init__(self, model):
        """
        :param model: The underlying model object this instance wraps.
        """
        # Only stores the reference; no further processing is done here.
        self._model = model
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,432
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/entry_wrapper.py
|
import copy
import logging
from itertools import dropwhile
import simuvex
l = logging.getLogger(name="angr.entry_wrapper")
# TODO: Make callsite an object and use it in SimRunKey and FunctionKey
class SimRunKey(object):
    """
    A context-sensitive key for a SimRun object.

    Two keys compare equal iff they agree on address, callsite tuple, and
    jump type, so they can deduplicate SimRuns in a context-sensitive CFG.
    """
    def __init__(self, addr, callsite_tuples, jump_type):
        self.addr = addr
        self.callsite_tuples = callsite_tuples
        self.jump_type = jump_type
        self._hash = None  # lazily computed and cached by __hash__()

    def callsite_repr(self):
        """Render the callsite tuple as "site@target -> site@target ..."."""
        if self.callsite_tuples is None:
            return "None"
        fmt = lambda a: 'None' if a is None else hex(a)
        pairs = [
            '@'.join(map(fmt, self.callsite_tuples[i:i + 2]))
            for i in xrange(0, len(self.callsite_tuples), 2)
        ]
        return " -> ".join(pairs)

    def __repr__(self):
        return "<SRKey %#08x (%s) %% %s>" % (self.addr, self.callsite_repr(), self.jump_type)

    def __hash__(self):
        if self._hash is None:
            # Keys are never mutated after construction, so hash just once.
            self._hash = hash((self.callsite_tuples, self.addr, self.jump_type))
        return self._hash

    def __eq__(self, other):
        if not isinstance(other, SimRunKey):
            return False
        return (self.addr == other.addr
                and self.callsite_tuples == other.callsite_tuples
                and self.jump_type == other.jump_type)

    def __ne__(self, other):
        return not self == other

    @staticmethod
    def new(addr, callstack_suffix, jumpkind):
        """Build a key, mapping the raw jumpkind onto a coarse jump type."""
        if jumpkind.startswith('Ijk_Sys') or jumpkind == 'syscall':
            jump_type = 'syscall'
        elif jumpkind in ('Ijk_Exit', 'exit'):
            jump_type = 'exit'
        else:
            jump_type = "normal"
        return SimRunKey(addr, callstack_suffix, jump_type)

    @property
    def func_addr(self):
        """
        Tail element of the callsite tuple, if any; presumably the most
        recent call target (see CallStack.stack_suffix) -- None otherwise.
        """
        if self.callsite_tuples:
            return self.callsite_tuples[-1]
        return None
class FunctionKey(object):
    """
    A context-sensitive key for a function.

    Equality and hashing are defined over (addr, callsite_tuples).
    """
    def __init__(self, addr, callsite_tuples):
        """
        :param int addr: Address of the function.
        :param tuple callsite_tuples: Flattened (call site, call target)
                                      pairs giving the calling context.
        """
        self.addr = addr
        self.callsite_tuples = callsite_tuples
        self._hash = None  # lazily computed and cached by __hash__()

    def callsite_repr(self):
        """Render the callsite tuple as "site@target -> site@target ..."."""
        if self.callsite_tuples is None:
            return "None"
        s = []
        format_addr = lambda addr: 'None' if addr is None else hex(addr)
        for i in xrange(0, len(self.callsite_tuples), 2):
            s.append('@'.join(map(format_addr, self.callsite_tuples[i:i + 2])))
        return " -> ".join(s)

    def __repr__(self):
        s = "<FuncKey %#08x (%s)>" % (self.addr, self.callsite_repr())
        return s

    def __hash__(self):
        if self._hash is None:
            self._hash = hash((self.callsite_tuples, ) + (self.addr, ))
        return self._hash

    def __eq__(self, other):
        return isinstance(other, FunctionKey) and \
            self.addr == other.addr and self.callsite_tuples == other.callsite_tuples

    def __ne__(self, other):
        # Bug fix: Python 2 does NOT derive __ne__ from __eq__, so without
        # this, `!=` fell back to identity comparison and two equal keys
        # compared as unequal. Mirrors SimRunKey.__ne__.
        return not self == other

    @staticmethod
    def new(addr, callsite_tuples):
        """Construct a FunctionKey; thin convenience wrapper."""
        return FunctionKey(addr, callsite_tuples)
class CallStackFrame(object):
    """
    CallStackFrame represents a stack frame in the call stack.
    """
    def __init__(self, call_site, call_target, function_address, return_target,
                 stack_pointer=None, accessed_registers=None):
        """
        Constructor.

        :param int call_site: Address of the call site.
        :param int call_target: Target of the call. Usually it is the address of a function.
        :param int function_address: Address of the current function. Note that it may not always be the same as
                                     call_target (consider situations like PLT entries and tail-call optimization).
        :param int return_target: Target address of returning.
        :param int stack_pointer: Value of the stack pointer.
        :param set accessed_registers: A set of registers that are accessed.
        :return: None
        """
        self.call_site = call_site
        self.call_target = call_target
        self.function_address = function_address
        self.return_target = return_target
        self.stack_pointer = stack_pointer
        # Default to a fresh set per frame -- never share a mutable default.
        if accessed_registers is None:
            accessed_registers = set()
        self.accessed_registers = accessed_registers

    def __repr__(self):
        """
        Get a string representation.

        :return: A printable representation of the CallStackFrame object.
        :rtype: str
        """
        fmt = lambda v: ("%#x" % v) if v is not None else "None"
        return "CallStackFrame (calling %s from %s, returning to %s, function %s)" % (
            fmt(self.call_target),
            fmt(self.call_site),
            fmt(self.return_target),
            fmt(self.function_address),
        )

    def copy(self):
        """
        Make a copy of the call stack frame.

        The accessed-register set is copied as well, so mutating the copy's
        set does not affect the original frame.

        :return: A new stack frame
        :rtype: CallStackFrame
        """
        return CallStackFrame(
            self.call_site,
            self.call_target,
            self.function_address,
            self.return_target,
            stack_pointer=self.stack_pointer,
            accessed_registers=self.accessed_registers.copy(),
        )
class CallStack(object):
    """
    CallStack is a representation of a call stack along a specific execution path.

    Frames are treated as immutable: mutation happens by copy-and-replace of
    the topmost CallStackFrame (see the current_function_address setter).
    """
    def __init__(self, stack=None):
        """
        Constructor.

        :param list stack: A list representing the stack, where each element is a CallStackFrame instance.
        :return: None
        """
        self._stack = [ ] if stack is None else stack

    #
    # Static methods
    #

    @staticmethod
    def stack_suffix_to_string(stack_suffix):
        """
        Convert a stack suffix to a human-readable string representation.

        :param tuple stack_suffix: The stack suffix.
        :return: A string representation
        :rtype: str
        """
        s = "[" + ",".join([("0x%x" % i) if i is not None else "Unspecified" for i in stack_suffix]) + "]"
        return s

    @staticmethod
    def _rfind(lst, item):
        """
        Reverse look-up.

        :param list lst: The list to look up in.
        :param item: The item to look for.
        :return: Offset of the item if found. A ValueError is raised if the item is not in the list.
        :rtype: int
        """
        try:
            # Scan indices right-to-left; dropwhile skips non-matching
            # indices and .next() yields the first match.
            # NOTE: xrange/.next() are Python 2 idioms.
            return dropwhile(lambda x: lst[x] != item,
                             reversed(xrange(len(lst)))).next()
        except Exception:
            # A StopIteration (no match) surfaces here as a ValueError.
            raise ValueError("%s not in the list" % item)

    #
    # Overriden properties
    #

    def __len__(self):
        """
        Get how many frames there are in the current stack

        :return: Number of frames
        :rtype: int
        """
        return len(self._stack)

    def __repr__(self):
        """
        Get a string representation.

        :return: A printable representation of the CallStack object
        :rtype: str
        """
        return "<CallStack of %d frames>" % len(self._stack)

    #
    # Properties
    #

    @property
    def current_function_address(self):
        """
        Address of the current function.

        :return: the address of the function
        :rtype: int
        """
        if len(self._stack) == 0:
            return 0 # This is the root level
        else:
            frame = self._stack[-1]
            return frame.function_address

    @current_function_address.setter
    def current_function_address(self, function_address):
        """
        Set the address of the current function. Note that we must make a copy of the CallStackFrame as CallStackFrame
        is considered to be immutable.

        :param int function_address: The function address.
        :return: None
        """
        frame = self._stack[-1].copy()
        frame.function_address = function_address
        # Copy-on-write: replace the top frame with the modified copy.
        self._stack[-1] = frame

    @property
    def all_function_addresses(self):
        """
        Get all function addresses called in the path, from the earliest one to the most recent one

        :return: a list of function addresses
        :rtype: list
        """
        return [ frame.function_address for frame in self._stack ]

    @property
    def current_stack_pointer(self):
        """
        Get the value of the stack pointer.

        :return: Value of the stack pointer
        :rtype: int
        """
        if len(self._stack) == 0:
            return None
        else:
            frame = self._stack[-1]
            return frame.stack_pointer

    @property
    def current_function_accessed_registers(self):
        """
        Get all accessed registers of the function.

        :return: A set of register offsets
        :rtype: set
        """
        if len(self._stack) == 0:
            return set()
        else:
            frame = self._stack[-1]
            return frame.accessed_registers

    @property
    def current_return_target(self):
        """
        Get the return target.

        :return: The address of return target.
        :rtype: int
        """
        if len(self._stack) == 0:
            return None
        return self._stack[-1].return_target

    #
    # Private methods
    #

    def _rfind_return_target(self, target):
        """
        Check if the return target exists in the stack, and return the index if exists. We always search from the most
        recent call stack frame since the most recent frame has a higher chance to be hit in normal CFG recovery.

        :param int target: Target of the return.
        :return: The index of the object
        :rtype: int
        """
        for i in xrange(len(self._stack) - 1, -1, -1):
            frame = self._stack[i]
            if frame.return_target == target:
                return i
        # Not found.
        return None

    #
    # Public methods
    #

    def dbg_repr(self):
        """
        Debugging representation of this CallStack object.

        :return: Details of this CalLStack
        :rtype: str
        """
        stack = [ ]
        # Most recent frame first (index 0 in the printout).
        for i, frame in enumerate(reversed(self._stack)):
            s = "%d | %s -> %s, returning to %s" % (
                i,
                "None" if frame.call_site is None else "%#x" % (frame.call_site),
                "None" if frame.function_address is None else "%#x" % (frame.function_address),
                "None" if frame.return_target is None else "%#x" % (frame.return_target)
            )
            stack.append(s)
        return "\n".join(stack)

    def clear(self):
        """
        Clear the call stack.

        :return: None
        """
        self._stack = [ ]

    def stack_suffix(self, context_sensitivity_level):
        """
        Generate the stack suffix. A stack suffix can be used as the key to a SimRun in CFG recovery.

        The suffix is a flattened tuple of (call_site, call_target) pairs of
        the most recent `context_sensitivity_level` frames; missing frames
        are padded on the left with (None, None).

        :param int context_sensitivity_level: Level of context sensitivity.
        :return: A tuple of stack suffix.
        :rtype: tuple
        """
        length = len(self._stack)
        ret = ()
        for i in xrange(context_sensitivity_level):
            index = length - i - 1
            if index < 0:
                # Stack is shallower than the sensitivity level: pad.
                ret = (None, None) + ret
            else:
                frame = self._stack[index]
                ret = (frame.call_site, frame.call_target) + ret
        return ret

    def call(self, callsite_addr, addr, retn_target=None, stack_pointer=None):
        """
        Push a stack frame into the call stack. This method is called when calling a function in CFG recovery.

        :param int callsite_addr: Address of the call site
        :param int addr: Address of the call target
        :param int retn_target: Address of the return target
        :param int stack_pointer: Value of the stack pointer
        :return: None
        """
        # The call target doubles as the initial function address.
        frame = CallStackFrame(callsite_addr, addr, addr, retn_target, stack_pointer=stack_pointer)
        self._stack.append(frame)

    def ret(self, retn_target):
        """
        Pop one or many call frames from the stack. This method is called when returning from a function in CFG
        recovery.

        :param int retn_target: The target to return to.
        :return: None
        """
        return_target_index = self._rfind_return_target(retn_target)

        if return_target_index is not None:
            # We may want to return to several levels up there, not only a
            # single stack frame
            levels = return_target_index
            # Remove all frames higher than the level
            # (the matching frame itself is popped as well).
            self._stack = self._stack[ : levels]

        else:
            l.warning("Returning to an unexpected address %#x", retn_target)

            # For Debugging
            # raise Exception()

            # There are cases especially in ARM where return is used as a jump
            # So we don't pop anything out

    def copy(self):
        """
        Make a copy of this CallStack object.
        Note that although the stack is copied, each stack frame inside the stack is not duplicated.

        :return: A new copy
        :rtype: CallStack
        """
        return CallStack(stack=self._stack[::])
class BBLStack(object):
    """
    Tracks, per function activation (keyed by callstack suffix or, failing
    that, function address), the basic blocks visited in that activation.
    """
    def __init__(self, stack_dict=None):
        self._stack_dict = stack_dict if stack_dict is not None else { }

    @staticmethod
    def _get_key(callstack_suffix, func_addr):
        # A non-empty callstack suffix identifies the activation context;
        # otherwise fall back to the bare function address.
        return callstack_suffix if len(callstack_suffix) > 0 else func_addr

    def copy(self):
        """Deep copy, so block lists of copy and original are independent."""
        return BBLStack(copy.deepcopy(self._stack_dict))

    def call(self, callstack_suffix, func_addr):
        """Start a fresh (empty) block stack for a newly entered function."""
        self._stack_dict[self._get_key(callstack_suffix, func_addr)] = []

    def ret(self, callstack_suffix, func_addr):
        """Drop the block stack of a function activation we return from."""
        key = self._get_key(callstack_suffix, func_addr)
        if key not in self._stack_dict:
            l.warning("Attempting to ret from a non-existing stack frame %s.",
                      hex(key) if isinstance(key, (int, long)) else key)
            return
        del self._stack_dict[key]

    def push(self, callstack_suffix, func_addr, bbl):
        """Record that basic block `bbl` was visited in this activation."""
        key = self._get_key(callstack_suffix, func_addr)
        if key not in self._stack_dict:
            # Unexpected exit target: create the frame on the fly.
            l.warning("Key %s is not in stack dict. It might be caused by an unexpected exit target.",
                      hex(key) if isinstance(key, (int, long)) else key)
            self.call(callstack_suffix, func_addr)
        self._stack_dict[key].append(bbl)

    def in_stack(self, callstack_suffix, func_addr, bbl):
        """Return True if `bbl` was already visited in this activation."""
        key = self._get_key(callstack_suffix, func_addr)
        blocks = self._stack_dict.get(key)
        return bbl in blocks if blocks is not None else False

    def __repr__(self):
        lines = [ ]
        for key, stack in self._stack_dict.iteritems():
            header = ", ".join([ (hex(k) if k is not None else "None") for k in key ])
            entry = "[" + header + "]:\n " + " -> ".join([ hex(b) for b in stack ])
            lines.append(entry)
        return "\n".join(lines)
class EntryWrapper(object):
"""
Describes an entry in CFG or VFG. Only used internally by the analysis.
"""
def __init__(self, addr, path, context_sensitivity_level, simrun_key=None, src_simrun_key=None,
src_exit_stmt_idx=None, jumpkind=None, call_stack=None, bbl_stack=None, is_narrowing=False,
skip=False, final_return_address=None):
self.addr = addr # Note that addr may not always be equal to self.path.addr (for syscalls, for example)
self._path = path
self.jumpkind = jumpkind
self.src_simrun_key = src_simrun_key
self.src_exit_stmt_idx = src_exit_stmt_idx
self.skip = skip
self._simrun_key = simrun_key
# Other parameters
self._context_sensitivity_level = context_sensitivity_level
self.is_narrowing = is_narrowing
if call_stack is None:
self._call_stack = CallStack()
# Added the function address of the current exit to callstack
se = self._path.state.se
sp_expr = self._path.state.regs.sp
# If the sp_expr cannot be concretized, the stack pointer cannot be traced anymore.
try:
sp = se.exactly_n_int(sp_expr, 1)[0]
except (simuvex.SimValueError, simuvex.SimSolverModeError):
l.warning("Stack pointer cannot be concretized. CallStack cannot track the stack pointer changes.")
# Set the stack pointer to None
sp = None
self._call_stack.call(None, self._path.addr, retn_target=final_return_address, stack_pointer=sp)
else:
self._call_stack = call_stack
if bbl_stack is None:
self._bbl_stack = BBLStack()
# Initialize the BBL stack
self._bbl_stack.call(self._call_stack.stack_suffix(self._context_sensitivity_level), path.addr)
else:
self._bbl_stack = bbl_stack
assert self._call_stack is not None and self._bbl_stack is not None
@property
def path(self):
return self._path
@property
def call_stack(self):
return self._call_stack
def call_stack_copy(self):
return self._call_stack.copy()
def get_call_stack_suffix(self):
return self._call_stack.stack_suffix(self._context_sensitivity_level)
def bbl_stack_push(self, call_stack_suffix, function_addr, bbl_addr):
self._bbl_stack.push(call_stack_suffix, function_addr, bbl_addr)
def bbl_in_stack(self, call_stack_suffix, function_addr, bbl_addr):
return self._bbl_stack.in_stack(call_stack_suffix, function_addr, bbl_addr)
def bbl_stack(self):
return self._bbl_stack
def bbl_stack_copy(self):
return self._bbl_stack.copy()
@property
def func_addr(self):
return self._call_stack.current_function_address
@property
def current_stack_pointer(self):
return self._call_stack.current_stack_pointer
@property
def accessed_registers_in_function(self):
return self._call_stack.current_function_accessed_registers
def __repr__(self):
    # NOTE(review): relies on `self.addr` and `self.jumpkind`, which are not
    # assigned in the visible part of this class — presumably attributes or
    # properties defined elsewhere; confirm against the full class definition.
    return "<Entry %#08x %% %s>" % (self.addr, self.jumpkind)
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,433
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/code_location.py
|
class CodeLocation(object):
    """
    Stands for a specific program point by specifying basic block address and statement ID (for IRSBs), or SimProcedure
    name (for SimProcedures).

    Equality and hashing are based on (simrun_addr, stmt_idx, sim_procedure) only;
    ``ins_addr`` and any extra keyword arguments (stored in ``self.info``) are ignored.
    """
    def __init__(self, simrun_addr, stmt_idx, sim_procedure=None, ins_addr=None, **kwargs):
        """
        Constructor.

        :param simrun_addr:   Address of the SimRun.
        :param stmt_idx:      Statement ID. None for SimProcedures.
        :param sim_procedure: The corresponding SimProcedure class.
        :param ins_addr:      The instruction address. Optional.
        :param kwargs:        Optional arguments, will be stored in ``self.info``,
                              but not used in __eq__ or __hash__.
        """
        self.simrun_addr = simrun_addr
        self.stmt_idx = stmt_idx
        self.sim_procedure = sim_procedure
        self.ins_addr = ins_addr
        self.info = { }
        self._store_kwargs(**kwargs)

    def __repr__(self):
        if self.simrun_addr is None:
            # SimProcedure-only location: there is no block address to show.
            return '<%s>' % self.sim_procedure
        else:
            if self.stmt_idx is None:
                s = "<%s%#x(-)" % (
                    ("%#x " % self.ins_addr) if self.ins_addr else "",
                    self.simrun_addr,
                )
            else:
                s = "<%s%#x(%d)" % (
                    ("%#x " % self.ins_addr) if self.ins_addr else "",
                    self.simrun_addr,
                    self.stmt_idx,
                )
            ss = [ ]
            if self.info:
                # BUGFIX: was dict.iteritems(), which only exists on Python 2.
                # dict.items() behaves the same here on both Python 2 and 3.
                for k, v in self.info.items():
                    ss.append("%s=%s" % (k, v))
                s += " with %s" % ", ".join(ss)
            s += ">"
            return s

    def __eq__(self, other):
        """
        Check if self is the same as other.
        """
        if not isinstance(other, CodeLocation):
            # Defer to the other operand instead of raising AttributeError on
            # foreign types; Python then falls back to identity comparison.
            return NotImplemented
        return self.simrun_addr == other.simrun_addr and self.stmt_idx == other.stmt_idx and \
            self.sim_procedure is other.sim_procedure

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; define it explicitly so
        # != stays consistent with == on both interpreter versions.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __hash__(self):
        """
        returns the hash value of self.
        """
        return hash((self.simrun_addr, self.stmt_idx, self.sim_procedure))

    def _store_kwargs(self, **kwargs):
        # Stash any extra keyword arguments for later inspection (not part of identity).
        for k, v in kwargs.items():  # .items() works on both Python 2 and 3
            self.info[k] = v
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,434
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/congruency_check.py
|
import logging
import claripy
import simuvex
from ..analysis import Analysis, register_analysis

# Module-level logger; the DEBUG override was deliberately left commented out.
l = logging.getLogger('angr.analyses.congruency_check')
#l.setLevel(logging.DEBUG)
class CongruencyCheck(Analysis):
    """
    This is an analysis to ensure that angr executes things identically with different execution backends (i.e., unicorn vs vex).
    """
    def __init__(self, throw=False):
        """
        Initializes a CongruencyCheck analysis.
        :param throw: whether to raise an exception if an incongruency is found.
        """
        self._throw = throw
        # Path group holding the 'left' and 'right' stashes under comparison.
        self.pg = None
        # Snapshot of the path group from before the most recent step (kept for debugging).
        self.prev_pg = None
    def set_state_options(self, left_add_options=None, left_remove_options=None, right_add_options=None, right_remove_options=None):
        """
        Checks that the specified state options result in the same states over the next `depth` states.

        :param left_add_options / left_remove_options:   state options applied to the left state.
        :param right_add_options / right_remove_options: state options applied to the right state.
        :returns: self (via set_states -> set_paths -> set_path_group).
        """
        s_right = self.project.factory.full_init_state(
            add_options=right_add_options, remove_options=right_remove_options
        )
        s_left = self.project.factory.full_init_state(
            add_options=left_add_options, remove_options=left_remove_options
        )
        return self.set_states(s_left, s_right)
    def set_states(self, left_state, right_state):
        """
        Checks that the specified states stay the same over the next `depth` states.

        Wraps each state in a path and defers to set_paths().
        """
        p_right = self.project.factory.path(right_state)
        p_left = self.project.factory.path(left_state)
        return self.set_paths(p_left, p_right)
    def set_paths(self, left_path, right_path):
        """
        Checks that the specified paths stay the same over the next `depth` states.
        """
        # Build a path group with the right path stashed as 'right' and the
        # left path stashed as 'left'.
        pg = self.project.factory.path_group(right_path)
        pg.stash(to_stash='right')
        pg.active.append(left_path)
        pg.stash(to_stash='left')
        # Pre-create the stashes that run() later uses to park extra paths.
        pg.stash(to_stash='stashed_left')
        pg.stash(to_stash='stashed_right')
        return self.set_path_group(pg)
    def set_path_group(self, pg):
        # Install the path group to compare and return self for chaining.
        self.pg = pg
        return self
    @staticmethod
    def _sync_steps(pg, max_steps=None):
        """
        Step whichever of the 'left'/'right' stashes is behind until both sides
        have the same weighted length (or a side deadends/errors).

        NOTE(review): the recursive call at the bottom does not propagate
        max_steps, so only the first round of stepping is bounded — confirm
        whether that is intentional.
        """
        l.debug("Sync-stepping pathgroup...")
        l.debug(
            "... left width: %s, right width: %s",
            pg.left[0].weighted_length if len(pg.left) > 0 else None,
            pg.right[0].weighted_length if len(pg.right) > 0 else None,
        )
        if len(pg.errored) != 0 and (len(pg.left) == 0 or len(pg.right) == 0):
            # A path errored out; nothing sensible to sync.
            l.debug("... looks like a path errored")
            return pg
        if len(pg.left) == 0 and len(pg.right) != 0:
            l.debug("... left is deadended; stepping right %s times", max_steps)
            npg = pg.step(stash='right', n=max_steps)
        elif len(pg.right) == 0 and len(pg.left) != 0:
            l.debug("... right is deadended; stepping left %s times", max_steps)
            npg = pg.step(stash='left', n=max_steps)
        elif len(pg.right) == 0 and len(pg.left) == 0:
            l.debug("... both deadended.")
            return pg
        elif pg.left[0].weighted_length == pg.right[0].weighted_length:
            # Already in lockstep.
            l.debug("... synced")
            return pg
        elif pg.left[0].weighted_length < pg.right[0].weighted_length:
            l.debug("... right is ahead; stepping left up to %s times", max_steps)
            npg = pg.step(
                stash='left',
                until=lambda lpg: lpg.left[0].weighted_length >= pg.right[0].weighted_length,
                n=max_steps
            )
        elif pg.right[0].weighted_length < pg.left[0].weighted_length:
            l.debug("... left is ahead; stepping right up to %s times", max_steps)
            npg = pg.step(
                stash='right',
                until=lambda lpg: lpg.right[0].weighted_length >= pg.left[0].weighted_length,
                n=max_steps
            )
        # Recurse until one of the terminating branches above fires.
        return CongruencyCheck._sync_steps(npg)
    def _validate_incongruency(self):
        """
        Checks that a detected incongruency is not caused by translation backends having a different
        idea of what constitutes a basic block.

        :returns: True if the divergence is real (unaccounted for), False if it
                  was explained away by unicorn's block-splitting behavior.
        """
        ot = self._throw
        try:
            # Temporarily disable throwing so compare_paths() below reports
            # instead of raising; restored in the finally block.
            self._throw = False
            l.debug("Validating incongruency.")
            # Only attempt reconciliation when exactly one side runs under unicorn.
            if ("UNICORN" in self.pg.right[0].state.options) ^ ("UNICORN" in self.pg.left[0].state.options):
                if "UNICORN" in self.pg.right[0].state.options:
                    unicorn_stash = 'right'
                    normal_stash = 'left'
                else:
                    unicorn_stash = 'left'
                    normal_stash = 'right'
                unicorn_path = self.pg.stashes[unicorn_stash][0]
                normal_path = self.pg.stashes[normal_stash][0]
                if unicorn_path.state.arch.name in ("X86", "AMD64"):
                    # unicorn "falls behind" on loop and rep instructions, since
                    # it sees them as ending a basic block. Here, we will
                    # step the unicorn until it's caught up
                    npg = self.project.factory.path_group(unicorn_path)
                    npg.explore(find=lambda p: p.addr == normal_path.addr, n=200)
                    if len(npg.found) == 0:
                        l.debug("Validator failed to sync paths.")
                        return True
                    new_unicorn = npg.found[0]
                    # Account for the extra steps in the path's weighted length.
                    delta = new_unicorn.weighted_length - normal_path.weighted_length
                    normal_path.extra_length += delta
                    new_normal = normal_path
                elif unicorn_path.state.arch.name == "MIPS32":
                    # unicorn gets ahead here, because VEX falls behind for unknown reasons
                    # for example, this block:
                    #
                    # 0x1016f20: lui $gp, 0x17
                    # 0x1016f24: addiu $gp, $gp, -0x35c0
                    # 0x1016f28: addu $gp, $gp, $t9
                    # 0x1016f2c: addiu $sp, $sp, -0x28
                    # 0x1016f30: sw $ra, 0x24($sp)
                    # 0x1016f34: sw $s0, 0x20($sp)
                    # 0x1016f38: sw $gp, 0x10($sp)
                    # 0x1016f3c: lw $v0, -0x6cf0($gp)
                    # 0x1016f40: move $at, $at
                    npg = self.project.factory.path_group(normal_path)
                    npg.explore(find=lambda p: p.addr == unicorn_path.addr, n=200)
                    if len(npg.found) == 0:
                        l.debug("Validator failed to sync paths.")
                        return True
                    new_normal = npg.found[0]
                    delta = new_normal.weighted_length - unicorn_path.weighted_length
                    unicorn_path.extra_length += delta
                    new_unicorn = unicorn_path
                else:
                    l.debug("Dunno!")
                    return True
                if self.compare_paths(new_unicorn, new_normal):
                    # The re-synced paths agree: swap them into the path group
                    # and report the divergence as explained.
                    l.debug("Divergence accounted for by unicorn.")
                    self.pg.stashes[unicorn_stash][0] = new_unicorn
                    self.pg.stashes[normal_stash][0] = new_normal
                    return False
                else:
                    l.warning("Divergence unaccounted for by unicorn.")
                    return True
            else:
                # no idea
                l.warning("Divergence unaccounted for.")
                return True
        finally:
            self._throw = ot
    def _report_incongruency(self, *args):
        # Log the incongruency; raise only when the analysis was constructed
        # with throw=True. AngrIncongruencyError is imported at module bottom.
        l.warning(*args)
        if self._throw:
            raise AngrIncongruencyError(*args)
    def run(self, depth=None):
        """
        Checks that the paths in the specified path group stay the same over the next
        `depth` bytes.
        The path group should have a "left" and a "right" stash, each with a single
        path.

        NOTE(review): on normal loop exit (one side exhausted) this method falls
        off the end and implicitly returns None rather than True — callers that
        truth-test the result should confirm this is intended.
        """
        #pg_history = [ ]
        if len(self.pg.right) != 1 or len(self.pg.left) != 1:
            self._report_incongruency("Single path in pg.left and pg.right required.")
            return False
        # Bound unicorn's stepping so both sides stop at comparable depths.
        if "UNICORN" in self.pg.one_right.state.options and depth is not None:
            self.pg.one_right.state.unicorn.max_steps = depth
        if "UNICORN" in self.pg.one_left.state.options and depth is not None:
            self.pg.one_left.state.unicorn.max_steps = depth
        l.debug("Performing initial path comparison.")
        if not self.compare_paths(self.pg.left[0], self.pg.right[0]):
            self._report_incongruency("Initial path comparison check failed.")
            return False
        while len(self.pg.left) > 0 and len(self.pg.right) > 0:
            if depth is not None:
                self._update_progress(100. * float(self.pg.one_left.weighted_length) / depth)
            if len(self.pg.deadended) != 0:
                self._report_incongruency("Unexpected deadended paths before step.")
                return False
            # NOTE(review): unreachable — the while condition guarantees both
            # stashes are non-empty here.
            if len(self.pg.right) == 0 and len(self.pg.left) == 0:
                l.debug("All done!")
                return True
            if len(self.pg.right) != 1 or len(self.pg.left) != 1:
                self._report_incongruency("Different numbers of paths in left and right stash..")
                return False
            # do a step
            l.debug(
                "Stepping right path with weighted length %d/%s",
                self.pg.right[0].weighted_length,
                depth
            )
            self.prev_pg = self.pg.copy() #pylint:disable=unused-variable
            self.pg.step(stash='right')
            # Bring the left side back into lockstep with the right.
            CongruencyCheck._sync_steps(self.pg)
            if len(self.pg.errored) != 0:
                self._report_incongruency("Unexpected errored paths.")
                return False
            try:
                # Only treat a mismatch as fatal if the validator cannot
                # explain it away as a backend block-splitting artifact.
                if not self.compare_path_group(self.pg) and self._validate_incongruency():
                    self._report_incongruency("Path group comparison failed.")
                    return False
            except AngrIncongruencyError:
                if self._validate_incongruency():
                    raise
            if depth is not None:
                # Retire paths that have reached the requested depth.
                self.pg.drop(stash='left', filter_func=lambda p: p.weighted_length >= depth)
                self.pg.drop(stash='right', filter_func=lambda p: p.weighted_length >= depth)
            # Keep both sides ordered identically so paired comparison lines up,
            # then restore any previously-parked paths and park all but one pair.
            self.pg.right.sort(key=lambda p: p.addr)
            self.pg.left.sort(key=lambda p: p.addr)
            self.pg.stashed_right[:] = self.pg.stashed_right[::-1]
            self.pg.stashed_left[:] = self.pg.stashed_left[::-1]
            self.pg.move('stashed_right', 'right')
            self.pg.move('stashed_left', 'left')
            if len(self.pg.left) > 1:
                self.pg.split(from_stash='left', limit=1, to_stash='stashed_left')
                self.pg.split(from_stash='right', limit=1, to_stash='stashed_right')
    def compare_path_group(self, pg):
        """
        Pairwise-compare the left and right stashes of a path group.

        :returns: True if all paired paths compare equal, False otherwise.
        """
        if len(pg.left) != len(pg.right):
            self._report_incongruency("Number of left and right paths differ.")
            return False
        if len(pg.deadended) % 2 != 0:
            # Deadended paths should come in left/right pairs.
            self._report_incongruency("Odd number of deadended paths after step.")
            return False
        pg.drop(stash='deadended')
        if len(pg.left) == 0 and len(pg.right) == 0:
            return True
        # make sure the paths are the same
        for pl,pr in zip(sorted(pg.left, key=lambda p: p.addr), sorted(pg.right, key=lambda p: p.addr)):
            if not self.compare_paths(pl, pr):
                self._report_incongruency("Differing paths.")
                return False
        return True
    def compare_states(self, sl, sr):
        """
        Compares two states for similarity.

        :param sl: the left state.
        :param sr: the right state.
        :returns: True if the states are congruent, False otherwise.
        """
        joint_solver = claripy.Solver()
        # make sure the canonicalized constraints are the same
        n_map, n_counter, n_canon_constraint = claripy.And(*sr.se.constraints).canonicalize() #pylint:disable=no-member
        u_map, u_counter, u_canon_constraint = claripy.And(*sl.se.constraints).canonicalize() #pylint:disable=no-member
        n_canoner_constraint = sr.se.simplify(n_canon_constraint)
        u_canoner_constraint = sl.se.simplify(u_canon_constraint)
        # NOTE(review): joint_solver is populated here but never queried
        # afterwards — this looks vestigial; confirm before removing.
        joint_solver.add((n_canoner_constraint, u_canoner_constraint))
        # Identity comparison — presumably relying on claripy AST interning,
        # where structurally equal ASTs are the same object; verify.
        if n_canoner_constraint is not u_canoner_constraint:
            self._report_incongruency("Different constraints!")
            return False
        # get the differences in registers and memory
        mem_diff = sr.memory.changed_bytes(sl.memory)
        reg_diff = sr.registers.changed_bytes(sl.registers)
        # this is only for unicorn
        if "UNICORN" in sl.options | sr.options:
            if sl.arch.name == "X86":
                reg_diff -= set(range(40, 52)) #ignore cc psuedoregisters
                reg_diff -= set(range(320, 324)) #some other VEX weirdness
                reg_diff -= set(range(340, 344)) #ip_at_syscall
            elif sl.arch.name == "AMD64":
                reg_diff -= set(range(144, 168)) #ignore cc psuedoregisters
        # make sure the differences in registers and memory are actually just renamed
        # versions of the same ASTs
        for diffs,(um,nm) in (
            (reg_diff, (sl.registers, sr.registers)),
            (mem_diff, (sl.memory, sr.memory)),
        ):
            for i in diffs:
                bn = nm.load(i, 1)
                bu = um.load(i, 1)
                bnc = bn.canonicalize(var_map=n_map, counter=n_counter)[-1]
                buc = bu.canonicalize(var_map=u_map, counter=u_counter)[-1]
                if bnc is not buc:
                    self._report_incongruency("Different memory or registers (index %d, values %r and %r)!", i, bn, bu)
                    return False
        # make sure the flags are the same
        if sl.arch.name in ("AMD64", "X86", "ARM", "AARCH64"):
            n_flags = simuvex.vex.ccall._get_flags(sr)[0].canonicalize(var_map=n_map, counter=n_counter)[-1]
            u_flags = simuvex.vex.ccall._get_flags(sl)[0].canonicalize(var_map=u_map, counter=u_counter)[-1]
            if n_flags is not u_flags and sl.se.simplify(n_flags) is not sr.se.simplify(u_flags):
                self._report_incongruency("Different flags!")
                return False
        return True
    def compare_paths(self, pl, pr):
        """
        Compare two paths: state congruence, weighted length, and address.

        :returns: True if the paths are congruent, False otherwise.
        """
        l.debug("Comparing paths...")
        if not self.compare_states(pl.state, pr.state):
            self._report_incongruency("Failed state similarity check!")
            return False
        if pr.weighted_length != pl.weighted_length:
            self._report_incongruency("Different weights!")
            return False
        if pl.addr != pr.addr:
            self._report_incongruency("Different addresses!")
            return False
        return True
# Import kept at the bottom of the module — presumably to avoid a circular
# import with angr.errors; confirm before moving it to the top.
from ..errors import AngrIncongruencyError
# Make this analysis available as project.analyses.CongruencyCheck.
register_analysis(CongruencyCheck, 'CongruencyCheck')
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,435
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/knowledge/__init__.py
|
from .codenode import CodeNode, BlockNode, HookNode
from .data import Data
from .function_manager import FunctionManager
from .function import Function
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,436
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/cfg_node.py
|
import pyvex
import simuvex
class CFGNode(object):
"""
This class stands for each single node in CFG.
"""
def __init__(self,
addr,
size,
cfg,
callstack_key=None,
input_state=None,
simprocedure_name=None,
syscall_name=None,
looping_times=0,
no_ret=False,
is_syscall=False,
syscall=None,
simrun=None,
function_address=None,
final_states=None,
simrun_key=None,
irsb=None,
instruction_addrs=None):
"""
Note: simprocedure_name is not used to recreate the SimProcedure object. It's only there for better
__repr__.
"""
self.callstack_key = callstack_key
self.addr = addr
self.input_state = input_state
self.simprocedure_name = simprocedure_name
self.syscall_name = syscall_name
self.size = size
self.looping_times = looping_times
self.no_ret = no_ret
self.is_syscall = is_syscall
self.syscall = syscall
self._cfg = cfg
self.function_address = function_address
self.simrun_key = simrun_key
self.name = simprocedure_name or cfg.project.loader.find_symbol_name(addr)
if function_address and self.name is None:
self.name = cfg.project.loader.find_symbol_name(function_address)
if self.name is not None:
offset = addr - function_address
self.name = "%s%+#x" % (self.name, offset)
# If this CFG contains an Ijk_Call, `return_target` stores the returning site.
# Note: this is regardless of whether the call returns or not. You should always check the `no_ret` property if
# you are using `return_target` to do some serious stuff.
self.return_target = None
self.instruction_addrs = instruction_addrs if instruction_addrs is not None else [ ]
if not instruction_addrs and not self.is_simprocedure:
# We have to collect instruction addresses by ourselves
# Try to grab all instruction addresses out!
if simrun is not None:
# This is a SimIRSB
irsb = simrun.irsb
if irsb is not None:
self.instruction_addrs = [ s.addr for s in irsb.statements if type(s) is pyvex.IRStmt.IMark ] # pylint:disable=unidiomatic-typecheck
self.final_states = [ ] if final_states is None else final_states
self.irsb = irsb
self.has_return = False
@property
def successors(self):
return self._cfg.get_successors(self)
@property
def predecessors(self):
return self._cfg.get_predecessors(self)
@property
def is_simprocedure(self):
return self.simprocedure_name is not None
def downsize(self):
"""
Drop saved states.
"""
self.input_state = None
self.final_states = [ ]
def copy(self):
c = CFGNode(self.addr,
self.size,
self._cfg,
callstack_key=self.callstack_key,
input_state=self.input_state,
simprocedure_name=self.simprocedure_name,
looping_times=self.looping_times,
no_ret=self.no_ret,
is_syscall=self.is_syscall,
syscall=self.syscall,
function_address=self.function_address,
final_states=self.final_states[ :: ]
)
c.instruction_addrs = self.instruction_addrs[ :: ]
return c
def __repr__(self):
    """Human-readable summary: name (or size) plus address and loop count."""
    if self.name is None:
        return "<CFGNode 0x%x (%d) [%d]>" % (self.addr, self.size, self.looping_times)
    return "<CFGNode %s (0x%x) [%d]>" % (self.name, self.addr, self.looping_times)
def __eq__(self, other):
    """Two CFGNodes are equal when their identity tuples match.

    Comparing against a raw SimRun is a usage error and raises ValueError.
    """
    if isinstance(other, (simuvex.SimIRSB, simuvex.SimProcedure)):
        raise ValueError("You do not want to be comparing a SimRun to a CFGNode.")
    if not isinstance(other, CFGNode):
        return False
    mine = (self.callstack_key, self.addr, self.size, self.looping_times, self.simprocedure_name)
    theirs = (other.callstack_key, other.addr, other.size, other.looping_times, other.simprocedure_name)
    return mine == theirs
def __hash__(self):
    """Hash over the identity tuple; consistent with __eq__ (size is omitted,
    which is safe since equal hashes are only required for equal objects)."""
    identity = (self.callstack_key, self.addr, self.looping_times, self.simprocedure_name)
    return hash(identity)
def to_codenode(self):
    """Convert this node into a knowledge-base code node.

    SimProcedure nodes become HookNodes (carrying the procedure name);
    ordinary blocks become BlockNodes.
    """
    if self.is_simprocedure:
        return HookNode(self.addr, self.size, self.simprocedure_name)
    return BlockNode(self.addr, self.size)
from ..knowledge.codenode import BlockNode, HookNode
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,437
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_scanf.py
|
import nose
import logging
l = logging.getLogger('angr.tests.scanf')
import os
import string
import angr
# Directory containing this test file; binaries are resolved relative to it.
test_location = str(os.path.dirname(os.path.realpath(__file__)))
class Checker(object):
    """Validates the concrete stdin solutions behind one scanf test case.

    :param check_func: Predicate applied to the integer substring extracted
                       from each candidate stdin string.
    :param length:     If given, candidate strings are truncated to this
                       length before extraction.
    :param base:       10 or 16; selects which characters count as digits.
    :param dummy:      If True, check() always succeeds (used for the "nope"
                       branches that consume no integer input).
    """

    def __init__(self, check_func, length=None, base=10, dummy=False):
        self._check_func = check_func
        self._length = length
        self._base = base
        self._dummy = dummy

    def _extract_integer(self, s):
        """Return the prefix of ``s`` ending at its first digit run.

        Scans for the first charset character that does not begin a
        "0x"/"0X" prefix, then cuts the string at the first non-digit that
        follows the run.  If the run is never terminated, the whole string
        is returned.  NOTE(review): the returned prefix may still contain the
        non-digit characters *preceding* the run; check functions rely on
        int() rejecting such strings.
        """
        charset = string.digits if self._base == 10 else string.digits + "abcdefABCDEF"
        component = ""
        digit_start_pos = None
        for i, c in enumerate(s):
            if digit_start_pos is not None:
                # Inside the digit run: stop at the first non-digit.
                if c not in charset:
                    component = s[:i]
                    break
            else:
                # Looking for the run start; skip a digit that opens "0x"/"0X".
                if c in charset and s[i:i+2] not in ("0x", "0X"):
                    # Bugfix: record the index, not the character.  The value
                    # was only ever None-checked, so behavior is unchanged,
                    # but the variable now holds what its name promises.
                    digit_start_pos = i
        if not component:
            component = s
        return component

    def check(self, path):
        """Return True iff every concrete stdin solution satisfies check_func."""
        if self._dummy:
            return True
        stdin_input = path.state.posix.files[0].content.load(1, 10)  # skip the first char used in switch
        some_strings = path.state.se.any_n_str(stdin_input, 1000)
        for s in some_strings:
            if self._length is not None:
                s = s[:self._length]
            component = self._extract_integer(s)
            if not self._check_func(component):
                return False
        return True
def run_scanf(threads):
    """Explore the scanf_test binary and validate every output/stdin pair.

    :param threads: thread count handed to the path group (None = default).
    """
    test_bin = os.path.join(test_location, "../../binaries/tests/x86_64/scanf_test")
    project = angr.Project(test_bin)
    path_group = project.factory.path_group(immutable=False, threads=threads)

    # Expected stdout strings, each mapped to a checker for the stdin
    # that must have produced it.
    expected_outputs = {
        "%%07x\n": Checker(lambda s: int(s, 16) == 0xaaaa, length=7, base=16),
        "%%07x and negative numbers\n": Checker(lambda s: int(s, 16) == -0xcdcd, length=7, base=16),
        "nope 0\n": Checker(None, dummy=True),
        "%%d\n": Checker(lambda s: int(s) == 133337),
        "%%d and negative numbers\n": Checker(lambda s: int(s) == -1337),
        "nope 1\n": Checker(None, dummy=True),
        "%%u\n": Checker(lambda s: int(s) == 0xaaaa),
        "%%u and negative numbers\n": Checker(lambda s: int(s) == -0xcdcd),
        "nope 2\n": Checker(None, dummy=True),
        "Unsupported switch\n": Checker(None, dummy=True),
    }

    # 0x4007f3 is the end of main; collect one found path per expected output.
    path_group.explore(find=0x4007f3, num_find=len(expected_outputs))

    seen = 0
    for found_path in path_group.found:
        test_output = found_path.state.posix.dumps(1)
        if test_output in expected_outputs:
            nose.tools.assert_true(expected_outputs[test_output].check(found_path),
                                   "Test case failed. Output is %s." % test_output)
            seen += 1
    # Every expected output must have been observed exactly once.
    nose.tools.assert_equal(seen, len(expected_outputs))
def test_scanf():
    """Nose generator entry point; currently single-threaded only."""
    for thread_count in (None,):
        yield run_scanf, thread_count
    # a multi-threaded variant (e.g. 8 threads) is intentionally disabled
if __name__ == "__main__":
run_scanf(1)
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,438
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_vfg_path.py
|
import angr
import logging
import os
l = logging.getLogger("angr_tests")
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'../../binaries/tests'))
def test_vfg_paths():
    """Smoke-test VFG path extraction from main to the printf return site."""
    project = angr.Project(os.path.join(test_location, "x86_64/track_user_input"))
    main_addr = project.loader.main_bin.get_symbol("main").addr
    printf_addr = 0x4005e1  # actually where it returns
    vfg = project.analyses.VFG(context_sensitivity_level=1, interfunction_level=4)
    # Result is unused; the test only verifies the call completes.
    vfg.get_paths(main_addr, printf_addr)
# Allow running this test module directly (outside the test runner).
if __name__ == '__main__':
    test_vfg_paths()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,439
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_serialization.py
|
import cPickle as pickle
import nose
import angr
import ana
import os
import tempfile
# Shared test-binary checkout, the binaries to round-trip, and arches to try.
internaltest_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
internaltest_files = [ 'argc_decide', 'argc_symbol', 'argv_test', 'counter', 'fauxware', 'fauxware.idb', 'manysum', 'pw', 'strlen', 'test_arrays', 'test_division', 'test_loops' ]
internaltest_arch = [ 'i386', 'armel' ]
def internaltest_vfg(p, cfg):
    """Pickle-roundtrip a VFG analysis and verify it survives serialization.

    :param p:   an angr Project.
    :param cfg: the CFG to base the VFG on.
    """
    vfg = p.analyses.VFG(cfg=cfg)
    # Use a context manager so the temp file (and its fd) is always released;
    # the original leaked one descriptor per call.
    with tempfile.TemporaryFile() as state:
        pickle.dump(vfg, state, -1)
        state.seek(0)
        vfg2 = pickle.load(state)
    nose.tools.assert_equals(vfg.final_states, vfg2.final_states)
    nose.tools.assert_equals(set(vfg.graph.nodes()), set(vfg2.graph.nodes()))
def internaltest_cfg(p):
    """Pickle-roundtrip a CFGAccurate analysis; returns the original CFG.

    :param p: an angr Project.
    :returns: the freshly-built CFGAccurate (for reuse by the VFG test).
    """
    cfg = p.analyses.CFGAccurate()
    # Use a context manager so the temp file (and its fd) is always released;
    # the original leaked one descriptor per call.
    with tempfile.TemporaryFile() as state:
        pickle.dump(cfg, state, -1)
        state.seek(0)
        cfg2 = pickle.load(state)
    nose.tools.assert_equals(set(cfg.nodes()), set(cfg2.nodes()))
    nose.tools.assert_equals(cfg.unresolvables, cfg2.unresolvables)
    nose.tools.assert_set_equal(set(cfg.deadends), set(cfg2.deadends))
    return cfg
def internaltest_cfgfast(p):
    """Pickle-roundtrip a CFGFast analysis, forcing capstone blocks first.

    :param p: an angr Project.
    """
    cfg = p.analyses.CFGFast()
    # Generate capstone blocks so they are exercised by the pickle roundtrip.
    main_function = cfg.functions.function(name='main')
    for b in main_function.blocks:
        c = b.capstone  # pylint:disable=unused-variable
    # Use a context manager so the temp file (and its fd) is always released;
    # the original leaked one descriptor per call.
    with tempfile.TemporaryFile() as state:
        pickle.dump(cfg, state, -1)
        state.seek(0)
        cfg2 = pickle.load(state)
    nose.tools.assert_equals(set(cfg.nodes()), set(cfg2.nodes()))
def internaltest_project(p):
    """Pickle-roundtrip a whole Project and compare its key attributes.

    :param p: an angr Project.
    """
    # Use a context manager so the temp file (and its fd) is always released;
    # the original leaked one descriptor per binary in the driver loop.
    with tempfile.TemporaryFile() as state:
        pickle.dump(p, state, -1)
        state.seek(0)
        loaded_p = pickle.load(state)
    nose.tools.assert_equals(p.arch, loaded_p.arch)
    nose.tools.assert_equals(p.filename, loaded_p.filename)
    nose.tools.assert_equals(p.entry, loaded_p.entry)
def test_serialization():
    """Round-trip projects and analyses through pickle for several binaries."""
    ana.set_dl(ana.DirDataLayer('/tmp/ana'))
    # Every executable test binary gets a bare project pickle roundtrip.
    for arch in internaltest_arch:
        for binary in internaltest_files:
            fpath = os.path.join(internaltest_location, arch, binary)
            if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
                internaltest_project(angr.Project(fpath))
    # CFG/VFG serialization is exercised on a single known binary.
    p = angr.Project(os.path.join(internaltest_location, 'i386/fauxware'), load_options={'auto_load_libs': False})
    internaltest_cfgfast(p)
    cfg = internaltest_cfg(p)
    internaltest_vfg(p, cfg)
# Allow running this test module directly (outside the test runner).
if __name__ == '__main__':
    test_serialization()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,440
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_mem_funcs.py
|
import nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
# Root of the shared test-binaries checkout, relative to this file.
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_memmove():
    """Symbolically execute the real memmove and check the copied bytes."""
    project = angr.Project(test_location + "/x86_64/memmove",
                           load_options={'auto_load_libs': True},
                           exclude_sim_procedures_list=['memmove'])
    explorer = angr.surveyors.Explorer(project, find=[0x4005D7]).run()
    state = explorer.found[0].state
    # Register offset 16 holds the destination pointer; read 13 bytes.
    result = state.se.any_str(state.memory.load(state.registers.load(16), 13))
    nose.tools.assert_equals(result, 'very useful.\x00')
def test_memcpy():
    """Symbolically execute the real memcpy and check the copied bytes."""
    project = angr.Project(test_location + "/x86_64/memcpy",
                           load_options={'auto_load_libs': True},
                           exclude_sim_procedures_list=['memcpy'])
    explorer = angr.surveyors.Explorer(project, find=[0x40065A]).run()
    state = explorer.found[0].state
    # Register offset 16 holds the destination pointer; read 19 bytes.
    result = state.se.any_str(state.memory.load(state.registers.load(16), 19))
    nose.tools.assert_equals(result, "let's test memcpy!\x00")
def test_memset():
    """Symbolically execute the real memset and check the filled buffer."""
    project = angr.Project(test_location + "/x86_64/memset",
                           load_options={'auto_load_libs': True},
                           exclude_sim_procedures_list=['memset'])
    explorer = angr.surveyors.Explorer(project, find=[0x400608]).run()
    state = explorer.found[0].state
    # Register offset 16 holds the buffer pointer; read 50 bytes.
    result = state.se.any_str(state.memory.load(state.registers.load(16), 50))
    nose.tools.assert_equals(result, 'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\x00')
if __name__ == "__main__":
test_memmove()
test_memcpy()
test_memset()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,441
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/extern_obj.py
|
from cle import Backend, Clemory, Segment
class AngrExternObject(Backend):
    """A synthetic CLE backend that hands out addresses for angr externs.

    Allocates aligned pseudo-addresses from a fixed-size backing region so
    that symbols with no real backing object still get unique addresses.
    """

    def __init__(self, arch, alloc_size=0x4000, granularity=16):
        super(AngrExternObject, self).__init__('##angr_externs##')
        self._next_addr = 0
        self._lookup_table = {}
        self._arch = arch
        self._alloc_size = alloc_size
        self._granularity = granularity
        # Zero-filled backing memory covering the whole region.
        self.memory = Clemory(arch)
        self.memory.add_backer(0, '\0' * alloc_size)
        # Single read+execute (not writable) segment spanning the region.
        segment = Segment(0, 0, 0, alloc_size)
        segment.is_readable = True
        segment.is_writable = False
        segment.is_executable = True
        self.segments = [segment]

    def get_min_addr(self):
        return self.rebase_addr

    def get_max_addr(self):
        return self.rebase_addr + self._alloc_size

    def contains_addr(self, addr):
        return self.get_min_addr() <= addr < self.get_max_addr()

    def get_pseudo_addr(self, ident, size=16):
        """Return (allocating on first sight) the address for ``ident``.

        Reservations are rounded up to the allocation granularity.
        """
        if ident not in self._lookup_table:
            self._lookup_table[ident] = self._next_addr
            self._next_addr += size + ((self._granularity - size) % self._granularity)
        return self._lookup_table[ident] + self.rebase_addr

    def contains_identifier(self, ident):
        return ident in self._lookup_table

    def get_pseudo_addr_for_symbol(self, ident):
        """Like get_pseudo_addr, but returns None instead of allocating."""
        if ident in self._lookup_table:
            return self._lookup_table[ident] + self.rebase_addr
        return None
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,442
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/exploration_techniques/__init__.py
|
class ExplorationTechnique(object):
"""
An otiegnqwvk is a set of hooks for path groups that assists
in the implementation of new techniques in symbolic exploration.
TODO: choose actual name for the functionality (techniques? something?)
Any number of these methods may be overridden by a subclass.
To use an exploration technique, call ``pg.use_technique``.
"""
# pylint: disable=unused-argument, no-self-use
def __init__(self):
# this attribute will be set from above by the path group
self.project = None
def setup(self, pg):
"""
Perform any initialization on this path group you might need to do.
"""
pass
def step_path(self, path):
"""
Perform the process of stepping a path forward.
If the stepping fails, return None to fall back to a default stepping procedure.
Otherwise, return a tuple of lists: successors, unconstrained, unsat, pruned, errored
"""
return None
def step(self, pg, stash, **kwargs):
"""
Step this stash of this path group forward.
Return the stepped path group.
"""
return pg.step(stash=stash, **kwargs)
def filter(self, path):
"""
Perform filtering on a path.
If the path should not be filtered, return None.
If the path should be filtered, return the name of the stash to move the path to.
If you want to modify the path before filtering it, return a tuple of the stash to move the path to and the
modified path.
"""
return None
def complete(self, pg):
"""
Return whether or not this path group has reached a "completed" state, i.e. ``pathgroup.run()`` should halt.
"""
return False
def _condition_to_lambda(self, condition, default=False):
"""
Translates an integer, set or list into a lambda that checks a path address against the given addresses, and the
other ones from the same basic block
:param condition: An integer, set, or list to convert to a lambda.
:param default: The default return value of the lambda (in case condition is None). Default: false.
:returns: A lambda that takes a path and returns the set of addresses that it matched from the condition
"""
if condition is None:
condition_function = lambda p: default
elif isinstance(condition, (int, long)):
return self._condition_to_lambda((condition,))
elif isinstance(condition, (tuple, set, list)):
addrs = set(condition)
def condition_function(p):
if p.addr in addrs:
# returning {p.addr} instead of True to properly handle find/avoid conflicts
return {p.addr}
try:
# If the address is not in the set (which could mean it is
# not at the top of a block), check directly in the blocks
# (Blocks are repeatedly created for every check, but with
# the IRSB cache in angr lifter it should be OK.)
return addrs.intersection(set(self.project.factory.block(p.addr).instruction_addrs))
except AngrError:
return False
elif hasattr(condition, '__call__'):
condition_function = condition
else:
raise AngrExplorationTechniqueError("ExplorationTechnique is unable to convert given type (%s) to a callable condition function." % condition.__class__)
return condition_function
#registered_actions = {}
#registered_surveyors = {}
#
#def register_action(name, strat):
# registered_actions[name] = strat
#
#def register_surveyor(name, strat):
# registered_surveyors[name] = strat
from .explorer import Explorer
from .threading import Threading
from .dfs import DFS
from .looplimiter import LoopLimiter
from .veritesting import Veritesting
from .oppologist import Oppologist
from ..errors import AngrError, AngrExplorationTechniqueError
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,443
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_accuracy.py
|
import nose
import angr
import simuvex
import os
test_location = os.path.join(os.path.dirname(os.path.realpath(str(__file__))), '../../binaries/tests/')
arch_data = { # (steps, [hit addrs], finished)
'x86_64': (330, (0x1021c20, 0x1021980, 0x1021be0, 0x4004b0, 0x400440, 0x400570), True), # Finishes
'i386': (213, (0x90198e0, 0x90195c0, 0x9019630, 0x90198a0, 0x8048370, 0x80482f8, 0x8048440), False), # blocked on syscalls
'ppc': (62, (0x11022f50, 0x11022eb0, 0x10000340, 0x100002e8), False), # blocked on syscalls
'ppc64': (183, (0x11047490, 0x100003fc, 0x10000368), False), # blocked on syscalls
'mips': (159, (0x1016f20, 0x400500, 0x400470), False), # blocked on some very weird TLS initialization?
'mips64': (324, (0x12103b828, 0x120000870, 0x1200007e0), False), # blocked on some io initialization
'armel': (153, (0x10154b8, 0x1108244, 0x83a8, 0x8348, 0x84b0), False), # blocked on __kuser_cmpxchg
'aarch64': (197, (0x1020b04, 0x400430, 0x4003b8, 0x400538), False), # blocked on syscalls
}
def emulate(arch):
steps, hit_addrs, finished = arch_data[arch]
filepath = test_location + arch + '/test_arrays'
p = angr.Project(filepath, use_sim_procedures=False)
#if arch not in ('x86_64', 'i386'):
# state = p.factory.full_init_state(args=['./test_arrays'])
#else:
# state = p.factory.full_init_state(args=['./test_arrays'], add_options={simuvex.o.STRICT_PAGE_ACCESS})
state = p.factory.full_init_state(args=['./test_arrays'], add_options={simuvex.o.STRICT_PAGE_ACCESS, simuvex.o.CGC_ZERO_FILL_UNCONSTRAINED_MEMORY})
pg = p.factory.path_group(state)
pg2 = pg.step(until=lambda lpg: len(lpg.active) != 1,
step_func=lambda lpg: lpg if len(lpg.active) == 1 else lpg.prune()
)
is_finished = False
if len(pg2.active) > 0:
path = pg2.active[0]
elif len(pg2.deadended) > 0:
path = pg2.deadended[0]
is_finished = True
elif len(pg2.errored) > 0:
path = pg2.errored[0]
else:
raise ValueError("This pathgroup does not contain a path we can use for this test?")
nose.tools.assert_greater_equal(path.length, steps)
# this is some wonky control flow that asserts that the items in hit_addrs appear in the path in order.
trace = path.addr_trace.hardcopy
reqs = list(hit_addrs)
while len(reqs) > 0:
req = reqs.pop(0)
while True:
nose.tools.assert_greater(len(trace), 0)
trace_head = trace.pop(0)
if trace_head == req:
break
nose.tools.assert_not_in(trace_head, reqs)
if finished:
nose.tools.assert_true(is_finished)
def test_emulation():
for arch in arch_data:
yield emulate, arch
def test_locale():
p = angr.Project(test_location + 'i386/isalnum', use_sim_procedures=False)
state = p.factory.full_init_state(args=['./isalnum'], add_options={simuvex.o.STRICT_PAGE_ACCESS})
pg = p.factory.path_group(state)
pg2 = pg.step(until=lambda lpg: len(lpg.active) != 1,
step_func=lambda lpg: lpg if len(lpg.active) == 1 else lpg.prune()
)
nose.tools.assert_equal(len(pg2.active), 0)
nose.tools.assert_equal(len(pg2.deadended), 1)
nose.tools.assert_equal(pg2.deadended[0].events[-1].type, 'terminate')
nose.tools.assert_equal(pg2.deadended[0].events[-1].objects['exit_code'].ast._model_concrete.value, 0)
if __name__ == '__main__':
print 'locale'
test_locale()
print 'x86_64'
emulate('x86_64')
print 'i386'
emulate('i386')
print 'ppc'
emulate('ppc')
print 'ppc64'
emulate('ppc64')
print 'mips'
emulate('mips')
print 'mips64'
emulate('mips64')
print 'armel'
emulate('armel')
print 'aarch64'
emulate('aarch64')
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,444
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/surveyors/__init__.py
|
all_surveyors = { }
from .explorer import Explorer
from .executor import Executor
from .escaper import Escaper
from .slicecutor import Slicecutor, HappyGraph
from .caller import Caller
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,445
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_static_hooker.py
|
import angr
import simuvex
import os
import nose
test_location = os.path.join(os.path.dirname(os.path.realpath(str(__file__))), '../../binaries/tests/')
def test_static_hooker():
test_file = os.path.join(test_location, 'x86_64/static')
p = angr.Project(test_file)
sh = p.analyses.StaticHooker()
nose.tools.assert_in(4197616, sh.results)
nose.tools.assert_is(sh.results[4197616], simuvex.SimProcedures['libc.so.6']['__libc_start_main'])
nose.tools.assert_is(p.hooked_by(4197616), simuvex.SimProcedures['libc.so.6']['__libc_start_main'])
if __name__ == '__main__':
test_static_hooker()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,446
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_simcc.py
|
import nose
import angr
from simuvex.s_cc import SimCCSystemVAMD64
import logging
l = logging.getLogger("angr.tests.test_simcc")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_simcc_x86_64():
binary_path = test_location + "/x86_64/simcc"
p = angr.Project(binary_path)
p.analyses.CFGAccurate()
f_arg1 = p.kb.functions['arg1']
nose.tools.assert_not_equal(f_arg1, None)
nose.tools.assert_equal(type(f_arg1.call_convention), SimCCSystemVAMD64)
nose.tools.assert_equal(len(f_arg1.arguments), 1)
nose.tools.assert_equal(f_arg1.arguments[0].reg_name, 'rdi')
f_arg7 = p.kb.functions['arg7']
nose.tools.assert_not_equal(f_arg7, None)
nose.tools.assert_equal(type(f_arg7.call_convention), SimCCSystemVAMD64)
nose.tools.assert_equal(len(f_arg7.arguments), 7)
nose.tools.assert_equal(f_arg7.arguments[1].reg_name, 'rsi')
f_arg9 = p.kb.functions.function(name='arg9')
nose.tools.assert_not_equal(f_arg9, None)
nose.tools.assert_equal(type(f_arg9.call_convention), SimCCSystemVAMD64)
nose.tools.assert_equal(len(f_arg9.arguments), 9)
nose.tools.assert_equal(f_arg9.arguments[8].stack_offset, 0x8 + 0x8 * 2)
def run_all():
functions = globals()
all_functions = dict(filter((lambda (k, v): k.startswith('test_') and hasattr(v, '__call__')), functions.items()))
for f in sorted(all_functions.keys()):
all_functions[f]()
if __name__ == "__main__":
logging.getLogger("angr.analyses.cfg").setLevel(logging.DEBUG)
run_all()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,447
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_cfgfast.py
|
import os
import logging
import nose.tools
import angr
from angr.analyses.cfg_fast import Segment, SegmentList
l = logging.getLogger("angr.tests.test_cfgfast")
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def cfg_fast_functions_check(arch, binary_path, func_addrs, func_features):
    """
    Generate a fast CFG on the given binary, and test if all specified functions are found
    :param str arch: the architecture, will be prepended to `binary_path`
    :param str binary_path: path to the binary under the architecture directory
    :param dict func_addrs: A collection of function addresses that should be recovered
    :param dict func_features: A collection of features for some of the functions
    :return: None
    """
    def _check(cfg):
        # Every expected function address must appear in the recovered function list.
        nose.tools.assert_true(set([ k for k in cfg.kb.functions.keys() ]).issuperset(func_addrs))
        for func_addr, feature_dict in func_features.iteritems():
            returning = feature_dict.get("returning", "undefined")
            # FIX: the original used `is not "undefined"`, an identity comparison
            # against a string literal that only works by CPython interning
            # accident; equality is the correct (and portable) comparison.
            if returning != "undefined":
                nose.tools.assert_is(cfg.kb.functions.function(addr=func_addr).returning, returning)

    path = os.path.join(test_location, arch, binary_path)
    proj = angr.Project(path, load_options={'auto_load_libs': False})
    # Default CFGFast run
    _check(proj.analyses.CFGFast())
    # Segment only
    _check(proj.analyses.CFGFast(force_segment=True))
    # with normalization enabled
    _check(proj.analyses.CFGFast(force_segment=True, normalize=True))
def cfg_fast_edges_check(arch, binary_path, edges):
    """
    Generate a fast CFG on the given binary, and test if all edges are found.
    :param str arch: the architecture, will be prepended to `binary_path`
    :param str binary_path: path to the binary under the architecture directory
    :param list edges: a list of edges
    :return: None
    """
    binary = os.path.join(test_location, arch, binary_path)
    project = angr.Project(binary, load_options={'auto_load_libs': False})
    recovered = project.analyses.CFGFast()
    for source_addr, target_addr in edges:
        source_node = recovered.get_any_node(source_addr)
        target_node = recovered.get_any_node(target_addr)
        nose.tools.assert_in(target_node, source_node.successors)
def test_cfg_0():
    """Function-recovery smoke test for the 'cfg_0' binary (nose test generator)."""
    filename = 'cfg_0'
    functions = {
        'x86_64': {
            0x400410, 0x400420, 0x400430, 0x400440,
            0x400470, 0x40052c, 0x40053c,
        },
    }
    function_features = {
        'x86_64': {},
    }
    for arch in functions.keys():
        yield cfg_fast_functions_check, arch, filename, functions[arch], function_features[arch]
def test_cfg_0_pe():
    """Function-recovery smoke test for the PE build of cfg_0 (nose test generator)."""
    filename = 'cfg_0_pe'
    functions = {
        'x86_64': {
            # 0x40150a, # currently angr identifies 0x40150e due to the way _func_addrs_from_prologues() is
            # implemented. this issue can be resolved with a properly implemented approach like Byte-Weight
            0x4014f0,
        },
    }
    function_features = {
        'x86_64': {},
    }
    for arch in functions.keys():
        yield cfg_fast_functions_check, arch, filename, functions[arch], function_features[arch]
def test_fauxware():
    """Function and return-edge recovery checks for the fauxware binary (nose test generator)."""
    filename = "fauxware"
    functions = {
        'x86_64': {
            0x4004e0,
            0x400510,
            0x400520,
            0x400530,
            0x400540,
            0x400550,
            0x400560,
            0x400570,  # .plt._exit
            0x400580,  # _start
            0x4005ac,
            0x4005d0,
            0x400640,
            0x400664,
            0x4006ed,
            0x4006fd,
            0x40071d,  # main
            0x4007e0,
            0x400870,
            0x400880,
            0x4008b8,
        },
    }
    function_features = {
        'x86_64': {
            0x400570: {"returning": False},  # plt.exit
            0x4006fd: {"returning": False},  # rejected
        },
    }
    return_edges = {
        'x86_64': [
            (0x4006fb, 0x4007c7),  # return from accepted to main
        ],
    }
    for arch in functions.keys():
        yield cfg_fast_functions_check, arch, filename, functions[arch], function_features[arch]
        yield cfg_fast_edges_check, arch, filename, return_edges[arch]
def test_cfg_loop_unrolling():
    """Edge-recovery checks around an unrolled loop (nose test generator)."""
    filename = "cfg_loop_unrolling"
    edges = {
        'x86_64': {
            (0x400658, 0x400636),
            (0x400658, 0x400661),
            (0x400651, 0x400636),
            (0x400651, 0x400661),
        },
    }
    for arch in edges.keys():
        yield cfg_fast_edges_check, arch, filename, edges[arch]
def test_segment_list_0():
    """Two disjoint same-sort occupations stay separate segments."""
    segments = SegmentList()
    segments.occupy(0, 1, "code")
    segments.occupy(2, 3, "code")
    nose.tools.assert_equal(len(segments), 2)
    nose.tools.assert_equal(segments._list[0].end, 1)
    nose.tools.assert_equal(segments._list[1].end, 5)
    nose.tools.assert_equal(segments.is_occupied(4), True)
    nose.tools.assert_equal(segments.is_occupied(5), False)
def test_segment_list_1():
    """Adjacent occupations of the same sort are merged into one segment."""
    segments = SegmentList()
    # They should be merged
    segments.occupy(0, 1, "code")
    segments.occupy(1, 2, "code")
    nose.tools.assert_equal(len(segments), 1)
    nose.tools.assert_equal(segments._list[0].start, 0)
    nose.tools.assert_equal(segments._list[0].end, 3)
def test_segment_list_2():
    """Adjacent occupations of different sorts remain separate segments."""
    segments = SegmentList()
    # They should not be merged
    segments.occupy(0, 1, "code")
    segments.occupy(1, 2, "data")
    nose.tools.assert_equal(len(segments), 2)
    for index, (start, end) in enumerate([(0, 1), (1, 3)]):
        nose.tools.assert_equal(segments._list[index].start, start)
        nose.tools.assert_equal(segments._list[index].end, end)
def test_segment_list_3():
    """A different-sort occupation in the middle splits a merged region into three segments."""
    segments = SegmentList()
    # They should be merged, and create three different segments
    segments.occupy(0, 5, "code")
    segments.occupy(5, 5, "code")
    segments.occupy(1, 2, "data")
    nose.tools.assert_equal(len(segments), 3)
    expected = [(0, 1, "code"), (1, 3, "data"), (3, 10, "code")]
    for index, (start, end, sort) in enumerate(expected):
        nose.tools.assert_equal(segments._list[index].start, start)
        nose.tools.assert_equal(segments._list[index].end, end)
        nose.tools.assert_equal(segments._list[index].sort, sort)
def test_segment_list_4():
    """Overlapping/touching same-sort occupations collapse into a single segment."""
    segments = SegmentList()
    segments.occupy(5, 5, "code")
    segments.occupy(4, 1, "code")
    segments.occupy(2, 2, "code")
    nose.tools.assert_equal(len(segments), 1)
    nose.tools.assert_equal(segments._list[0].start, 2)
    nose.tools.assert_equal(segments._list[0].end, 10)
def test_segment_list_5():
    """Mixed sorts stay split until an overlapping occupation bridges them."""
    segments = SegmentList()
    segments.occupy(5, 5, "data")
    segments.occupy(4, 1, "code")
    segments.occupy(2, 2, "data")
    nose.tools.assert_equal(len(segments), 3)
    nose.tools.assert_equal(segments._list[0].start, 2)
    nose.tools.assert_equal(segments._list[2].end, 10)
    # Overwriting the middle with "data" merges everything into one segment.
    segments.occupy(3, 2, "data")
    nose.tools.assert_equal(len(segments), 1)
    nose.tools.assert_equal(segments._list[0].start, 2)
    nose.tools.assert_equal(segments._list[0].end, 10)
def test_segment_list_6():
    """An occupation overlapping a segment's start truncates the existing segment."""
    segments = SegmentList()
    segments.occupy(10, 20, "code")
    segments.occupy(9, 2, "data")
    nose.tools.assert_equal(len(segments), 2)
    expected = [(9, 11, 'data'), (11, 30, 'code')]
    for index, (start, end, sort) in enumerate(expected):
        nose.tools.assert_equal(segments._list[index].start, start)
        nose.tools.assert_equal(segments._list[index].end, end)
        nose.tools.assert_equal(segments._list[index].sort, sort)
def main():
    """Run every test in this module directly, without the nose runner."""
    module_globals = globals()
    # Segment-list tests are plain callables; run each one found by name.
    for name, value in module_globals.iteritems():
        if name.startswith("test_segment_list_") and hasattr(value, "__call__"):
            value()
    # The CFG tests are nose-style generators yielding (func, args...) tuples.
    for generator in (test_cfg_0, test_cfg_0_pe, test_fauxware, test_cfg_loop_unrolling):
        for args in generator():
            args[0](*args[1:])
if __name__ == "__main__":
    main()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,448
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_str_funcs.py
|
import nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_strncpy():
    """strncpy should copy the whole source string, terminator included."""
    project = angr.Project(test_location + "/x86_64/strncpy", load_options={'auto_load_libs': True}, exclude_sim_procedures_list=['strncpy'])
    found_state = angr.surveyors.Explorer(project, find=[0x4005FF]).run().found[0].state
    copied = found_state.se.any_str(found_state.memory.load(found_state.registers.load(16), 16))
    nose.tools.assert_equals(copied, 'why hello there\x00')
def test_strncpy_size():
    """strncpy with an explicit size pads the rest of the destination with NULs."""
    project = angr.Project(test_location + "/x86_64/strncpy-size", load_options={'auto_load_libs': True}, exclude_sim_procedures_list=['strncpy'])
    found_state = angr.surveyors.Explorer(project, max_repeats=50, find=[0x40064C]).run().found[0].state
    copied = found_state.se.any_str(found_state.memory.load(found_state.registers.load(16), 40))
    nose.tools.assert_equals(copied, 'just testing things\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
def test_strncpy_verify_null():
    """strncpy must not write past the terminator into pre-existing buffer contents."""
    project = angr.Project(test_location + "/x86_64/strncpy-verify-null", load_options={'auto_load_libs': True}, exclude_sim_procedures_list=['strncpy'])
    found_state = angr.surveyors.Explorer(project, max_repeats=50, find=[0x40064C]).run().found[0].state
    copied = found_state.se.any_str(found_state.memory.load(found_state.registers.load(16), 40))
    nose.tools.assert_equals(copied, 'just testing things\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00AAAAAA\x00')
def test_strstr_and_strncpy():
    """Combined strstr + strncpy behavior check on the composed binary."""
    project = angr.Project(test_location + "/x86_64/strstr_and_strncpy", load_options={'auto_load_libs': True}, exclude_sim_procedures_list=['strstr'])
    found_state = angr.surveyors.Explorer(project, max_repeats=50, find=[0x400657]).run().found[0].state
    copied = found_state.se.any_str(found_state.memory.load(found_state.registers.load(16), 15))
    nose.tools.assert_equals(copied, 'hi th hi there\x00')
def test_strstr():
    """strstr should locate the substring and leave the match reachable in memory."""
    project = angr.Project(test_location + "/x86_64/strstr", load_options={'auto_load_libs': True}, exclude_sim_procedures_list=['strstr'])
    found_state = angr.surveyors.Explorer(project, find=[0x4005FB]).run().found[0].state
    located = found_state.se.any_str(found_state.memory.load(found_state.registers.load(16), 9))
    nose.tools.assert_equals(located, 'hi there\x00')
if __name__ == "__main__":
    # Run every string-function test when executed as a script.
    test_strncpy()
    test_strncpy_size()
    test_strncpy_verify_null()
    test_strstr_and_strncpy()
    test_strstr()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,449
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/exploration_techniques/oppologist.py
|
import pyvex
import claripy
import simuvex
import functools
import logging
l = logging.getLogger("angr.exploration_techniques.Oppologist")
from ..errors import AngrError
# Exceptions the oppologizer is allowed to catch and log while retrying an
# uncooperative block; anything outside this tuple propagates to the caller.
exc_list = (AngrError, simuvex.SimError, claripy.ClaripyError, TypeError, ValueError, ArithmeticError, MemoryError)
from . import ExplorationTechnique
class Oppologist(ExplorationTechnique):
    """
    The Oppologist is an exploration technique that forces uncooperative code through qemu.
    """
    def __init__(self):
        ExplorationTechnique.__init__(self)
    @staticmethod
    def _restore_path(old, new):
        """Copy the unicorn plugin and option set from `old` back onto successor
        `new`, so the detour through unicorn leaves no lasting state changes."""
        new.state.release_plugin('unicorn')
        new.state.register_plugin('unicorn', old.state.unicorn.copy())
        new.state.options = set(old.state.options)
        return new
    def _oppologize(self, p, pn, **kwargs):
        """Force a single instruction of path copy `pn` through unicorn.

        Returns a 5-tuple (successors, unconstrained, unsat, pruned, errored),
        each successor restored to `p`'s original unicorn plugin and options.
        NOTE: Python-2 `map` returns lists here, so the tuple elements are lists.
        """
        l.debug("... pn: %s", pn)
        irsb = self.project.factory.block(pn.addr).vex
        # Instruction start addresses in the block, from the IMark statements.
        addrs = [ s.addr for s in irsb.statements if isinstance(s, pyvex.IRStmt.IMark) ]
        if len(addrs) > 1:
            # Stop at the second instruction so only one instruction executes.
            stops = [ addrs[1] ]
        else:
            stops = None
        pn.state.options.add(simuvex.options.UNICORN)
        pn.state.options.add(simuvex.options.UNICORN_AGGRESSIVE_CONCRETIZATION)
        # Limit unicorn to exactly one step and disable the countdown heuristics
        # that would otherwise keep unicorn from engaging immediately.
        pn.state.unicorn.max_steps = 1
        pn.state.unicorn.countdown_symbolic_registers = 0
        pn.state.unicorn.countdown_symbolic_memory = 0
        pn.state.unicorn.countdown_nonunicorn_blocks = 0
        pn.step(extra_stop_points=stops, throw=True, **kwargs)
        fixup = functools.partial(self._restore_path, p)
        l.debug("... successors: %s", pn.successors)
        return (
            map(fixup, [ pp for pp in pn.successors if not pp.errored ]),
            map(fixup, pn.unconstrained_successors),
            map(fixup, pn.unsat_successors),
            [ ], # pruned
            map(fixup, [ pp for pp in pn.successors if pp.errored ]), #errored
        )
    @staticmethod
    def _combine_results(*results):
        """Concatenate several (successors, unconstrained, unsat, pruned, errored)
        5-tuples into a single 5-tuple of lists."""
        all_successors = [ ]
        all_unconstrained = [ ]
        all_unsat = [ ]
        all_pruned = [ ]
        all_errored = [ ]
        for s,uc,us,p,e in results:
            all_successors.extend(s)
            all_unconstrained.extend(uc)
            all_unsat.extend(us)
            all_pruned.extend(p)
            all_errored.extend(e)
        return (
            all_successors,
            all_unconstrained,
            all_unsat,
            all_pruned,
            all_errored
        )
    def _delayed_oppology(self, p, e, **kwargs):
        """Re-step `p` up to (but not past) the instruction that failed, then
        oppologize only the successors stopped at that instruction.

        `e` is the exception from the failed step; its `executed_instruction_count`
        and `ins_addr` locate where the failure happened.
        """
        try:
            p.step(num_inst=e.executed_instruction_count, throw=True, **kwargs)
        except Exception: #pylint:disable=broad-except
            # Even the partial re-step failed: report whatever a non-throwing
            # step yields as the errored result.
            return [], [], [], [], p.step(num_inst=e.executed_instruction_count, **kwargs)
        # Successors parked exactly at the failing instruction need unicorn.
        need_oppologizing = [ pp for pp in p.successors if pp.addr == e.ins_addr ]
        results = [ (
            [ pp for pp in p.successors if pp.addr != e.ins_addr ],
            p.unconstrained_successors,
            p.unsat_successors,
            [ ],
            [ ]
        ) ]
        results += map(functools.partial(self._oppologize, p, **kwargs), need_oppologizing)
        return self._combine_results(*results)
    def step_path(self, p, **kwargs):
        """Step path `p`, falling back to unicorn single-stepping when simuvex
        cannot handle an instruction.

        Returns None on a normal step or on unrecoverable errors (presumably
        letting the caller use the default stepping result — TODO confirm
        against the path-group stepping contract), otherwise the combined
        (successors, unconstrained, unsat, pruned, errored) tuple.
        """
        try:
            p.step(throw=True, **kwargs)
            return None
        except (simuvex.SimUnsupportedError, simuvex.SimCCallError) as e:
            l.debug("Errored on path %s after %d instructions", p, e.executed_instruction_count)
            try:
                if e.executed_instruction_count:
                    # Some instructions succeeded; replay up to the failure point.
                    return self._delayed_oppology(p, e, **kwargs)
                else:
                    # Failure on the very first instruction: oppologize a copy.
                    return self._oppologize(p, p.copy(), **kwargs)
            except exc_list: #pylint:disable=broad-except
                l.error("Oppologizer hit an error.", exc_info=True)
                return None
        except exc_list: #pylint:disable=broad-except
            l.error("Original block hit an error.", exc_info=True)
            return None
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,450
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_argc.py
|
import nose
import angr
import logging
l = logging.getLogger("angr_tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_mips():
    """argc_decide (MIPS): the target is reachable only with exactly one argument."""
    project = angr.Project(test_location + "/mips/argc_decide")
    r_addr = 0x4006f4
    for argv, expected in ((['aaa'], 1), (['aaa', 'bbb'], 0)):
        start = project.factory.path(args=argv, env={"HOME": "/home/angr"})
        explorer = project.surveyors.Explorer(find=[r_addr], start=start)
        explorer.run()
        nose.tools.assert_equals(len(explorer.found), expected)
def test_mipsel():
    """argc_decide (MIPSEL): the target is reachable only with two arguments."""
    project = angr.Project(test_location + "/mipsel/argc_decide")
    r_addr = 0x40070c
    for argv, expected in ((['aaa', 'bbb'], 1), (['aaa'], 0)):
        start = project.factory.path(args=argv, env={"HOME": "/home/angr"})
        explorer = project.surveyors.Explorer(find=[r_addr], start=start)
        explorer.run()
        nose.tools.assert_equals(len(explorer.found), expected)
def test_i386():
    """argc_decide (i386): the target is reachable only with exactly one argument."""
    project = angr.Project(test_location + "/i386/argc_decide")
    r_addr = 0x80483d4
    for argv, expected in ((['aaa'], 1), (['aaa', 'bbb'], 0)):
        start = project.factory.path(args=argv, env={"HOME": "/home/angr"})
        explorer = project.surveyors.Explorer(find=[r_addr], start=start)
        explorer.run()
        nose.tools.assert_equals(len(explorer.found), expected)
def test_amd64():
    """argc_decide (x86_64): the target is reachable only with exactly one argument."""
    project = angr.Project(test_location + "/x86_64/argc_decide")
    r_addr = 0x4004c7
    for argv, expected in ((['aaa'], 1), (['aaa', 'bbb'], 0)):
        start = project.factory.path(args=argv, env={"HOME": "/home/angr"})
        explorer = project.surveyors.Explorer(find=[r_addr], start=start)
        explorer.run()
        nose.tools.assert_equals(len(explorer.found), expected)
def test_arm():
    """argc_decide (ARM EL): the target is reachable only with exactly one argument."""
    project = angr.Project(test_location + "/armel/argc_decide")
    r_addr = 0x1040c
    for argv, expected in ((['aaa'], 1), (['aaa', 'bbb'], 0)):
        start = project.factory.path(args=argv, env={"HOME": "/home/angr"})
        explorer = project.surveyors.Explorer(find=[r_addr], start=start)
        explorer.run()
        nose.tools.assert_equals(len(explorer.found), expected)
def test_ppc32():
    """argc_decide (PPC32): the target is reachable only with exactly one argument."""
    project = angr.Project(test_location + "/ppc/argc_decide")
    r_addr = 0x10000404
    for argv, expected in ((['aaa'], 1), (['aaa', 'bbb'], 0)):
        start = project.factory.path(args=argv, env={"HOME": "/home/angr"})
        explorer = project.surveyors.Explorer(find=[r_addr], start=start)
        explorer.run()
        nose.tools.assert_equals(len(explorer.found), expected)
if __name__ == "__main__":
    # Run every architecture's argc test when executed as a script.
    test_mips()
    test_mipsel()
    test_arm()
    test_i386()
    test_amd64()
    test_ppc32()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,451
|
MayfeelYang/angr
|
refs/heads/master
|
/setup.py
|
from distutils.core import setup
setup(
name='angr',
version='5.6.8.22',
description='The next-generation binary analysis platform from UC Santa Barbara\'s Seclab!',
packages=['angr', 'angr.surveyors', 'angr.analyses', 'angr.knowledge', 'angr.exploration_techniques'],
install_requires=[
'capstone',
'networkx',
'futures',
'progressbar',
'mulpyplexer',
'cooldict',
'ana',
'archinfo',
'pyvex',
'claripy',
'simuvex',
'cle',
'cachetools',
],
)
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,452
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/cfg_utils.py
|
from collections import defaultdict
import networkx
class CFGUtils(object):
"""
A helper class with some static methods and algorithms implemented, that in fact, might take more than just normal
CFGs.
"""
@staticmethod
def find_merge_points(function_addr, function_endpoints, graph): # pylint:disable=unused-argument
"""
Given a local transition graph of a function, find all merge points inside, and then perform a
quasi-topological sort of those merge points.
A merge point might be one of the following cases:
- two or more paths come together, and ends at the same address.
- end of the current function
:param int function_addr: Address of the function.
:param list function_endpoints: Endpoints of the function. They typically come from Function.endpoints.
:param networkx.DiGraph graph: A local transition graph of a function. Normally it comes from Function.graph.
:return: A list of ordered addresses of merge points.
:rtype: list
"""
merge_points = set()
in_degree_to_nodes = defaultdict(set)
for node in graph.nodes_iter():
in_degree = graph.in_degree(node)
in_degree_to_nodes[in_degree].add(node)
if in_degree > 1:
merge_points.add(node.addr)
# Revised version of a topological sort
# we define a partial order between two merge points as follows:
# - if A -> B and not B -> A, then we have A < B
# - if A -> B and B -> A, and in a BFS, A is visited before B, then we have A < B
# - if A -> B and B -> A, and none of them were visited before, and addr(A) < addr(B), then we have A < B
ordered_merge_points = CFGUtils.quasi_topological_sort_nodes(graph,
node_addrs=merge_points,
in_degree_to_nodes=in_degree_to_nodes
)
addrs = [n.addr for n in ordered_merge_points]
return addrs
@staticmethod
def quasi_topological_sort_nodes(graph, node_addrs=None, in_degree_to_nodes=None):
"""
Sort a given set of nodes based on the following rules:
# - if A -> B and not B -> A, then we have A < B
# - if A -> B and B -> A, and in a BFS, A is visited before B, then we have A < B
# - if A -> B and B -> A, and none of them were visited before, and addr(A) < addr(B), then we have A < B
The above rules can be viewed as a quasi-topological sorting of nodes in the graph.
:param networkx.DiGraph graph: A local transition graph of the function.
:param list node_addrs: A list of node addresses to sort. None if you want to sort all nodes inside the graph.
:param dict in_degree_to_nodes: A mapping between in-degrees and sets of nodes.
:return: A list of ordered nodes.
:rtype: list
"""
# make a copy to the graph since we are gonna modify it
graph_copy = networkx.DiGraph(graph)
# store nodes that are visited and whose in-degree is not 0
waiting_queue = []
ordered_nodes = []
if in_degree_to_nodes is None:
# initialize in_degree_to_nodes mapping
in_degree_to_nodes = defaultdict(set)
for node in graph.nodes_iter():
in_degree = graph.in_degree(node)
in_degree_to_nodes[in_degree].add(node)
while graph_copy.number_of_nodes():
if not in_degree_to_nodes[0]:
# there is a loop somewhere
# get a node out of the waiting queue
n = waiting_queue[0]
waiting_queue = waiting_queue[1:]
# get all edges that has `n` as the destination
in_edges = graph_copy.in_edges(n)
# get all successors of n
successors = [ suc for suc in graph_copy.successors(n) if suc is not n ]
# since there are loops, we want to create new edges from those old destination to all successors of n,
# in order to keep the topology right
for src, _ in in_edges:
for suc in successors:
if src is not suc:
in_degree = graph_copy.in_degree(suc)
if suc not in graph_copy[src]:
graph_copy.add_edge(src, suc)
in_degree_to_nodes[in_degree].remove(suc)
in_degree_to_nodes[in_degree + 1].add(suc)
# remove all edges that has `n` as the destination
for src, _ in in_edges:
graph_copy.remove_edge(src, n)
else:
# get an zero-in-degree node
n = in_degree_to_nodes[0].pop()
if node_addrs is None or n.addr in node_addrs:
ordered_nodes.append(n)
if n in waiting_queue:
waiting_queue.remove(n)
if n not in graph_copy:
continue
out_edges = graph_copy.out_edges(n)
# now remove all out_edges
for edge in out_edges:
_, dst = edge
if n is not dst:
in_degree = graph_copy.in_degree(dst)
in_degree_to_nodes[in_degree].remove(dst)
in_degree_to_nodes[in_degree - 1].add(dst)
graph_copy.remove_edge(n, dst)
if dst not in waiting_queue:
waiting_queue.append(dst)
graph_copy.remove_node(n)
return ordered_nodes
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,453
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/cdg.py
|
from collections import defaultdict
import logging
import networkx
l = logging.getLogger("angr.analyses.cdg")
from ..analysis import Analysis, register_analysis
class TemporaryNode(object):
"""
A temporary node.
Used as the start node and end node in post-dominator tree generation. Also used in some test cases.
"""
def __init__(self, label):
self._label = label
def __repr__(self):
return 'TemporaryNode[%s]' % self._label
def __eq__(self, other):
if isinstance(other, TemporaryNode) and other._label == self._label:
return True
return False
def __hash__(self):
return hash('%s' % self._label)
class ContainerNode(object):
"""
A container node.
Only used in post-dominator tree generation. We did this so we can set the index property without modifying the
original object.
"""
def __init__(self, obj):
self._obj = obj
self.index = None
@property
def obj(self):
return self._obj
def __eq__(self, other):
if isinstance(other, ContainerNode):
return self._obj == other._obj and self.index == other.index
return False
class CDG(Analysis):
"""
Implements a control dependence graph.
"""
def __init__(self, cfg, start=None, no_construct=False):
"""
Constructor.
:param cfg: The control flow graph upon which this control dependence graph will build
:param start: The starting point to begin constructing the control dependence graph
:param no_construct: Skip the construction step. Only used in unit-testing.
"""
self._binary = self.project.loader.main_bin
self._start = start if start is not None else self.project.entry
self._cfg = cfg
self._ancestor = None
self._semi = None
self._post_dom = None
self._graph = None
self._label = None
self._normalized_cfg = None
if not no_construct:
if self._cfg is None:
self._cfg = self.project.analyses.CFGAccurate()
# FIXME: We should not use get_any_irsb in such a real setting...
self._entry = self._cfg.get_any_node(self._start)
self._construct()
#
# Properties
#
@property
def graph(self):
return self._graph
#
# Public methods
#
def get_post_dominators(self):
"""
Return the post-dom tree
"""
return self._post_dom
def get_dependants(self, run):
"""
Return a list of nodes that are control dependent on the given node in the control dependence graph
"""
if run in self._graph.nodes():
return self._graph.successors(run)
else:
return []
def get_guardians(self, run):
"""
Return a list of nodes on whom the specific node is control dependent in the control dependence graph
"""
if run in self._graph.nodes():
return self._graph.predecessors(run)
else:
return []
#
# Private methods
#
def _construct(self):
"""
Construct a control dependence graph.
This implementation is based on figure 6 of paper An Efficient Method of Computing Static Single Assignment
Form by Ron Cytron, etc.
"""
self._acyclic_cfg = self._cfg.copy()
# TODO: Cycle-removing is not needed - confirm it later
# The CFG we use should be acyclic!
#self._acyclic_cfg.remove_cycles()
# Pre-process the acyclic CFG
self._pre_process_cfg()
# Construct post-dominator tree
self._pd_construct()
self._graph = networkx.DiGraph()
# Construct the reversed dominance frontier mapping
rdf = self._df_construct(self._post_dom)
for y in self._cfg.graph.nodes_iter():
if y not in rdf:
continue
for x in rdf[y]:
self._graph.add_edge(x, y)
# self._post_process()
def _pre_process_cfg(self):
"""
Pre-process the acyclic CFG by changing all FakeRet edges to normal edges when necessary (e.g. the normal return
edge does not exist)
"""
for src, dst, data in self._acyclic_cfg.graph.edges(data=True):
if 'jumpkind' in data and data['jumpkind'] == 'Ijk_FakeRet':
all_edges_to_dst = self._acyclic_cfg.graph.in_edges([ dst ], data=True)
if not any((s, d) for s, d, da in all_edges_to_dst if da['jumpkind'] != 'Ijk_FakeRet' ):
# All in edges are FakeRets
# Change them to a normal edge
for _, _, data in all_edges_to_dst:
data['jumpkind'] = 'Ijk_Boring'
def _post_process(self):
"""
There are cases where a loop has two overlapping loop headers thanks
to the way VEX is dealing with continuous instructions. As we were
breaking the connection between the second loop header and its
successor, we shall restore them in our CDG.
"""
# TODO: Verify its correctness
loop_back_edges = self._cfg.get_loop_back_edges()
for b1, b2 in loop_back_edges:
self._graph.add_edge(b1, b2)
#
# Dominance frontier related
#
def _df_construct(self, postdom):
"""
Construct a dominance frontier based on the given post-dominator tree.
This implementation is based on figure 2 of paper An Efficient Method of Computing Static Single Assignment
Form by Ron Cytron, etc.
:param postdom: The post-dominator tree
:returns: A dict of dominance frontier
"""
DF = { }
# Perform a post-order search on the post-dom tree
for x in networkx.dfs_postorder_nodes(postdom):
DF[x] = set()
# local set
for y in self._normalized_cfg.successors_iter(x):
if x not in postdom.predecessors(y):
DF[x].add(y)
# up set
if x is None:
continue
for z in postdom.successors(x):
if z is x:
continue
if z not in DF:
continue
for y in DF[z]:
if x not in postdom.predecessors(y):
DF[x].add(y)
return DF
#
# Post-dominator tree related
#
def _pd_construct(self):
"""
Find post-dominators for each node in CFG.
This implementation is based on paper A Fast Algorithm for Finding Dominators in a Flow Graph by Thomas
Lengauer and Robert E. Tarjan from Stanford University, ACM Transactions on Programming Languages and Systems,
Vol. 1, No. 1, July 1979
"""
# Step 1
_normalized_cfg, vertices, parent = self._pd_normalize_graph()
# vertices is a list of ContainerNode(CFGNode) instances
# parent is a dict storing the mapping from ContainerNode(CFGNode) to ContainerNode(CFGNode)
# Each node in normalized_cfg is a ContainerNode(CFGNode) instance
bucket = defaultdict(set)
dom = [None] * (len(vertices))
self._ancestor = [None] * (len(vertices) + 1)
for i in xrange(len(vertices) - 1, 0, -1):
w = vertices[i]
# Step 2
if w not in parent:
# It's one of the start nodes
continue
predecessors = _normalized_cfg.predecessors(w)
for v in predecessors:
u = self._pd_eval(v)
if self._semi[u.index].index < self._semi[w.index].index:
self._semi[w.index] = self._semi[u.index]
bucket[vertices[self._semi[w.index].index].index].add(w)
self._pd_link(parent[w], w)
# Step 3
for v in bucket[parent[w].index]:
u = self._pd_eval(v)
if self._semi[u.index].index < self._semi[v.index].index:
dom[v.index] = u
else:
dom[v.index] = parent[w]
bucket[parent[w].index].clear()
for i in xrange(1, len(vertices)):
w = vertices[i]
if w not in parent:
continue
if dom[w.index].index != vertices[self._semi[w.index].index].index:
dom[w.index] = dom[dom[w.index].index]
self._post_dom = networkx.DiGraph() # The post-dom tree described in a directional graph
for i in xrange(1, len(vertices)):
if dom[i] is not None and vertices[i] is not None:
self._post_dom.add_edge(dom[i].obj, vertices[i].obj)
self._pd_post_process()
# Create the normalized_cfg without the annoying ContainerNodes
self._normalized_cfg = networkx.DiGraph()
for src, dst in _normalized_cfg.edges_iter():
self._normalized_cfg.add_edge(src.obj, dst.obj)
def _pd_post_process(self):
"""
Take care of those loop headers/tails where we manually broke their
connection to the next BBL
"""
loop_back_edges = self._cfg.get_loop_back_edges()
for b1, b2 in loop_back_edges:
# The edge between b1 and b2 is manually broken
# The post dominator of b1 should be b2 (or not?)
if type(b1) is TemporaryNode:
# This is for testing
successors = self._acyclic_cfg.graph.successors(b1)
else:
# Real CFGNode!
successors = self._acyclic_cfg.get_successors(b1)
if len(successors) == 0:
if b2 in self._post_dom:
self._post_dom.add_edge(b1, b2)
else:
l.debug("%s is not in post dominator dict.", b2)
def _pd_normalize_graph(self):
# We want to reverse the CFG, and label each node according to its
# order in a DFS
graph = networkx.DiGraph()
n = self._entry
queue = [ n ]
start_node = TemporaryNode("start_node")
# Put the start_node into a Container as well
start_node = ContainerNode(start_node)
container_nodes = { }
traversed_nodes = set()
while len(queue) > 0:
node = queue.pop()
if type(node) is TemporaryNode:
# This is for testing
successors = self._acyclic_cfg.graph.successors(node)
else:
# Real CFGNode!
successors = self._acyclic_cfg.get_successors(node)
# Put it into a container
if node in container_nodes:
container_node = container_nodes[node]
else:
container_node = ContainerNode(node)
container_nodes[node] = container_node
traversed_nodes.add(container_node)
if len(successors) == 0:
# Add an edge between this node and our start node
graph.add_edge(start_node, container_node)
for s in successors:
if s in container_nodes:
container_s = container_nodes[s]
else:
container_s = ContainerNode(s)
container_nodes[s] = container_s
graph.add_edge(container_s, container_node) # Reversed
if container_s not in traversed_nodes:
queue.append(s)
# Add a start node and an end node
graph.add_edge(container_nodes[n], ContainerNode(TemporaryNode("end_node")))
all_nodes_count = len(traversed_nodes) + 2 # A start node and an end node
l.debug("There should be %d nodes in all", all_nodes_count)
counter = 0
vertices = [ ContainerNode("placeholder") ]
scanned_nodes = set()
parent = {}
while True:
# DFS from the current start node
stack = [ start_node ]
while len(stack) > 0:
node = stack.pop()
counter += 1
# Mark it as scanned
scanned_nodes.add(node)
# Put the container node into vertices list
vertices.append(node)
# Put each successors into the stack
successors = graph.successors(node)
# Set the index property of it
node.index = counter
for s in successors:
if s not in scanned_nodes:
stack.append(s)
parent[s] = node
scanned_nodes.add(s)
if counter >= all_nodes_count:
break
l.debug("%d nodes are left out during the DFS. They must formed a cycle themselves.", all_nodes_count - counter)
# Find those nodes
leftovers = [ s for s in traversed_nodes if s not in scanned_nodes ]
graph.add_edge(start_node, leftovers[0])
# We have to start over...
counter = 0
parent = {}
scanned_nodes = set()
vertices = [ ContainerNode("placeholder") ]
self._semi = vertices[::]
self._label = vertices[::]
return (graph, vertices, parent)
def _pd_link(self, v, w):
self._ancestor[w.index] = v
def _pd_eval(self, v):
if self._ancestor[v.index] is None:
return v
else:
self._pd_compress(v)
return self._label[v.index]
def _pd_compress(self, v):
if self._ancestor[self._ancestor[v.index].index] != None:
self._pd_compress(self._ancestor[v.index])
if self._semi[self._label[self._ancestor[v.index].index].index].index < self._semi[self._label[v.index].index].index:
self._label[v.index] = self._label[self._ancestor[v.index].index]
self._ancestor[v.index] = self._ancestor[self._ancestor[v.index].index]
register_analysis(CDG, 'CDG')
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,454
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_dfg.py
|
#!/usr/bin/env python
import logging
import time
import sys
from os.path import join, dirname, realpath
l = logging.getLogger("angr.tests.test_dfg")
l.setLevel(logging.DEBUG)
import nose
import angr
import pyvex
test_location = str(join(dirname(realpath(__file__)), "../../binaries/tests"))
def perform_one(binary_path):
proj = angr.Project(join(test_location, binary_path),
load_options={'auto_load_libs': False},
)
start = time.time()
cfg = proj.analyses.CFGAccurate(context_sensitivity_level=2)
end = time.time()
duration = end - start
l.info("CFG generated in %f seconds.", duration)
dfg = proj.analyses.DFG(cfg=cfg)
nose.tools.assert_true(len(dfg.dfgs) <= len(cfg.nodes()))
for addr, d in dfg.dfgs.items():
nose.tools.assert_true(cfg.get_any_node(addr) is not None)
# We check there is not node that we ignored
for n in d.nodes():
nose.tools.assert_not_equal(n.tag, 'Ist_IMark')
nose.tools.assert_not_equal(n.tag, 'Ist_AbiHint')
nose.tools.assert_not_equal(n.tag, 'Ist_Exit')
if n.tag == 'Ist_Put':
nose.tools.assert_not_equal(n.offset, n.arch.ip_offset)
for (a, b) in d.edges():
if isinstance(a, pyvex.IRExpr.IRExpr):
# We check that there is no edge between two expressions/const
nose.tools.assert_false(isinstance(b, pyvex.IRExpr.IRExpr))
# If there is an edge coming from an expr/const it should be in
# the dependencies of the other node
# FIXME
# Impossible to check because of the Unop optimization in the
# DFG...
# nose.tools.assert_true(a in b.expressions)
elif hasattr(a, 'tmp'):
# If there is an edge between a tmp and another node
# be sure that this tmp is in the dependencies of this node
tmps = [ ]
for e in b.expressions:
if hasattr(e, 'tmp'):
tmps.append(e.tmp)
nose.tools.assert_true(a.tmp in tmps)
def test_dfg_isalnum():
perform_one("i386/isalnum")
def test_dfg_counter():
perform_one("i386/counter")
def test_dfg_cfg_0():
perform_one("x86_64/cfg_0")
def test_dfg_fauxware():
perform_one("mips/fauxware")
def run_all():
functions = globals()
all_functions = dict(filter((lambda (k, v): k.startswith('test_') and hasattr(v, '__call__')), functions.items()))
for f in sorted(all_functions.keys()):
all_functions[f]()
if __name__ == "__main__":
logging.getLogger("angr.analyses.dfg").setLevel(logging.DEBUG)
if len(sys.argv) > 1:
globals()['test_' + sys.argv[1]]()
else:
run_all()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,455
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/log.py
|
import logging
# Capture the unpatched StreamHandler.emit at import time so that
# Loggers._emit_wrap can delegate to it after the monkey-patch below.
_original_emit = logging.StreamHandler.emit
class Loggers(object):
    """
    A dumb and simple way to aggregate all loggers in a convenient way.

    Every logger known to the logging module becomes an attribute of the
    instance (with '.' replaced by '_') for tab completion in iPython.
    If no root handler is configured yet, instantiating this also installs
    a basic handler and monkey-patches StreamHandler.emit with a
    colorizing wrapper (_emit_wrap).
    """
    def __init__(self, default_level=logging.WARNING):
        """
        :param default_level: level passed to logging.basicConfig when no
                              root handler exists yet (default: WARNING).
        """
        # All loggers are an attr of self for tab completion in iPython
        # (with . replaced with _)
        self._loggerdict = logging.Logger.manager.loggerDict
        # Use list(...items()) instead of .iteritems(): .iteritems() does not
        # exist on Python 3, and snapshotting the dict protects the loop from
        # "dictionary changed size during iteration" if a logger is created
        # concurrently.
        for name, logger in list(self._loggerdict.items()):
            attr = name.replace('.', '_')
            setattr(self, attr, logger)
        if len(logging.root.handlers) == 0:
            # No handler configured yet: install a basic one at default_level.
            fmt = '%(levelname)-7s | %(asctime)-23s | %(name)-8s | %(message)s'
            logging.basicConfig(format=fmt, level=default_level)
            # Colorize all stream output from here on.
            logging.StreamHandler.emit = self._emit_wrap
    @staticmethod
    def setall(level):
        """Set every currently-known logger to the given level."""
        # Snapshot the names: getLogger() replaces PlaceHolder entries in
        # loggerDict, which would mutate the dict during a py3 view iteration.
        for name in list(logging.Logger.manager.loggerDict.keys()):
            logging.getLogger(name).setLevel(level)
    @staticmethod
    def _emit_wrap(*args, **kwargs):
        """Wrap StreamHandler.emit, tinting record name/msg with a per-logger ANSI color."""
        # args is (handler, record); derive a stable color (31-37) from the name.
        record = args[1]
        color = hash(record.name) % 7 + 31
        try:
            record.name = ("\x1b[%dm" % color) + record.name + "\x1b[0m"
        except Exception:
            pass
        try:
            # record.msg may not support concatenation (e.g. non-str); best effort.
            record.msg = ("\x1b[%dm" % color) + record.msg + "\x1b[0m"
        except Exception:
            pass
        _original_emit(*args, **kwargs)
# Set the default to INFO at import time
# Loggers.setall(logging.INFO)
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,456
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/surveyors/escaper.py
|
#!/usr/bin/env python
from ..surveyor import Surveyor
from . import Explorer
import logging
l = logging.getLogger("angr.surveyors.Escaper")
class Escaper(Surveyor):
    """
    Escaper implements loop escaping!
    normal - any found normal paths from the loop
    forced - forced paths from the loop, if a normal wasn't found
    """
    def __init__(self, project, loop_addresses, start=None, max_concurrency=None, max_active=None, pickle_paths=None,
                 loop_iterations=0, iteration_depth=100, unconstrain_memory=True, unconstrain_registers=True):
        """
        Creates an Escaper. Most options are for Surveyor (separate docs).
        :param loop_addresses: the addresses of all the basic blocks in the loop, to know the
                               instructions to which the analysis should be restricted
        :param loop_iterations: the number of times to run the loop before escaping
        :param iteration_depth: the maximum depth (in SimRuns) of a path through the loop
        """
        Surveyor.__init__(self, project, start=start, max_concurrency=max_concurrency, max_active=max_active,
                          pickle_paths=pickle_paths)
        self._loop_addresses = loop_addresses
        self._loop_iterations = loop_iterations
        self._iteration_depth = iteration_depth
        # Number of loop iterations executed so far by tick().
        self._current_iteration = 0
        self._done = False
        self._unconstrain_memory = unconstrain_memory
        self._unconstrain_registers = unconstrain_registers
        # Results: paths that left the loop normally vs. by forced unconstraining.
        self.normal = []
        self.forced = []
    def _tick_loop(self, start=None):
        # Run an Explorer restricted to the loop's blocks for at most one
        # iteration (max_repeats=1), looking for the loop header again.
        results = Explorer(self._project, start=start, find=self._loop_addresses[0], restrict=self._loop_addresses,
                           min_depth=2, max_depth=self._iteration_depth, max_repeats=1,
                           max_concurrency=self._max_concurrency, num_find=self._num_find).run()
        self.deadended += results.deadended
        return results
    def unconstrain_loop(self, constrained_entry):
        """
        Unconstrains an exit to the loop header by looping one more time
        and replacing all modified variables with unconstrained versions.
        """
        # Keep a snapshot of the state before the extra iteration so we can
        # diff against it below.
        constrained_state = constrained_entry.state.copy()
        # first, go through the loop normally, one more time
        constrained_results = self._tick_loop(start=constrained_entry)
        l.debug("%d paths to header found", len(constrained_results.found))
        # then unconstrain differences between the original state and any new
        # head states
        unconstrained_states = []
        for p in constrained_results.found:
            # because the head_entry might actually point partway *through* the
            # loop header, in the cases of a loop starting between
            # the counter-increment and the condition check (because the
            # counter is only incremented at the end of the loop, and the
            # end is placed in the beginning for optimization), so we run the
            # loop through to the *end* of the header
            new_state = p.state.copy()
            if self._unconstrain_registers:
                new_state.registers.unconstrain_differences(constrained_state.registers)
            if self._unconstrain_memory:
                new_state.memory.unconstrain_differences(constrained_state.memory)
            unconstrained_states.append(new_state)
        l.debug("%d unconstrained states", len(unconstrained_states))
        # Re-run the loop from each unconstrained state; paths that deviate
        # from the loop are the forced exits we are after.
        unconstrained_exits = []
        unconstrained_entry = constrained_entry
        for s in unconstrained_states:
            unconstrained_entry.state = s
            unconstrained_results = self._tick_loop(start=unconstrained_entry)
            unconstrained_exits += unconstrained_results.deviating
        return unconstrained_exits
    def tick(self):
        """
        Makes one run through the loop.
        """
        if self._current_iteration < self._loop_iterations:
            # Still within the requested number of normal iterations: collect
            # any naturally-exiting paths and continue with the ones that loop.
            l.debug("Currently at iteration %d of %d", self._current_iteration, self._loop_iterations)
            results = self._tick_loop(start=self.active_exits(reachable=True))
            l.debug("... found %d exiting paths", len(results.deviating))
            self.normal += results.deviating
            self.active = results.found
            self._current_iteration += 1
        else:
            # Iteration budget exhausted: force every remaining head out of
            # the loop by unconstraining modified registers/memory.
            all_exits = self.active_exits(reachable=True)
            l.debug("Unconstraining %d heads.", len(all_exits))
            for e in all_exits:
                self.forced += self.unconstrain_loop(e)
            self._done = True
    @property
    def done(self):
        # True once the forced-escape phase has run.
        return self._done
    def __repr__(self):
        return "<Escaper with paths: %s, %d normal, %d forced>" % (
            Surveyor.__repr__(self), len(self.normal), len(self.forced))
# Register this surveyor under its public name.
from . import all_surveyors
all_surveyors['Escaper'] = Escaper
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,457
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_checkbyte.py
|
import angr
import logging
l = logging.getLogger("angr.tests")
import os
# Location of the cross-compiled test binaries, relative to this test file.
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
# Architectures for which the "checkbyte" binary is built and exercised.
arches = ( "armel", "i386", "mips", "mipsel", "ppc64", "ppc", "x86_64" )
# TODO: arches += ( "armhf", )
def run_checkbyte(arch):
    """Step the 'checkbyte' binary for *arch* and check both output branches occur."""
    proj = angr.Project(os.path.join(test_location, arch, "checkbyte"))
    stepped = proj.factory.path_group().step(100)
    # Exactly two paths should terminate, one per branch of the byte check.
    assert len(stepped.deadended) == 2
    outputs = set(path.state.posix.dumps(1) for path in stepped.deadended)
    assert outputs == set(("First letter good\n", "First letter bad\n"))
def test_checkbyte():
    """Nose-style generator: yield one run_checkbyte case per architecture."""
    for architecture in arches:
        yield run_checkbyte, architecture
if __name__ == "__main__":
    # Drive the nose-style generator manually when executed as a script.
    for r,a in test_checkbyte():
        r(a)
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,458
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/static_hooker.py
|
import simuvex
from ..analysis import register_analysis, Analysis
from ..errors import AngrValueError
import logging
l = logging.getLogger('angr.analyses.static_hooker')
class StaticHooker(Analysis):
    """
    This analysis works on statically linked binaries - it finds the library functions statically
    linked into the binary and hooks them with the appropriate simprocedures.
    Right now it only works on libc functions and unstripped binaries, but hey! There's room to
    grow!
    """
    def __init__(self):
        # Maps hooked address -> the SimProcedure class installed there.
        # NOTE: self.project is injected by the Analysis machinery before
        # __init__ runs.
        self.results = {}
        if self.project.loader.main_bin.linking == 'dynamic':
            raise AngrValueError('StaticHooker only works on static binaries!')
        for func in self.project.loader.main_bin._symbol_cache.values():
            if not func.is_function: continue
            try:
                # KeyErrors (no libc SimProcedure for this name) and any other
                # hook failure are deliberately treated as "skip this symbol".
                self.project.hook(func.rebased_addr, simuvex.SimProcedures['libc.so.6'][func.name])
            except: # pylint: disable=bare-except
                l.debug("Failed to hook %s at %#x", func.name, func.rebased_addr)
            else:
                l.info("Hooked %s at %#x", func.name, func.rebased_addr)
                self.results[func.rebased_addr] = simuvex.SimProcedures['libc.so.6'][func.name]
# Register under the name used by project.analyses.StaticHooker().
register_analysis(StaticHooker, 'StaticHooker')
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,459
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_pickle.py
|
from claripy import BVS
from simuvex import SimFile
import pickle
import nose
import angr
import ana
import gc
def load_pickles():
    """Load both test pickles: the 'good' one should load, the 'bad' one should raise."""
    # This is the working case
    f = open("/tmp/pickletest_good", 'r')
    print pickle.load(f)
    f.close()
    # This will not work
    f = open("/tmp/pickletest_bad", 'r')
    print pickle.load(f)
    f.close()
def make_pickles():
    """Create /tmp/pickletest_good and /tmp/pickletest_bad for load_pickles().

    The 'good' pickle is dumped before the BVS values are tied to any state;
    the 'bad' one is dumped after they have been written into state-backed
    SimFiles, which is expected to make later unpickling fail.
    """
    p = angr.Project("/bin/bash")
    # Symbolic stand-ins for the standard streams.
    fs = {
        '/dev/stdin': SimFile('/dev/stdin', 0),
        '/dev/stdout': SimFile('/dev/stdout', 0),
        '/dev/stderr': SimFile('/dev/stderr', 0),
        #'/dev/urandom': SimFile('/dev/urandom', 0),
    }
    MEM_SIZE = 1024
    # One fresh symbolic bitvector (MEM_SIZE bytes) per file.
    mem_bvv = {}
    for f in fs:
        mem = BVS(f, MEM_SIZE * 8)
        mem_bvv[f] = mem
    # debug_wait()
    f = open("/tmp/pickletest_good", "w")
    #fname = f.name
    pickle.dump(mem_bvv, f, -1)
    f.close()
    # If you do not have a state you cannot write
    entry_state = p.factory.entry_state(fs=fs) #pylint:disable=unused-variable
    # Write the symbolic data into the (now state-backed) files and rewind.
    for f in fs:
        mem = mem_bvv[f]
        fs[f].write(mem, MEM_SIZE)
        fs[f].seek(0)
    f = open("/tmp/pickletest_bad", "w")
    #fname = f.name
    pickle.dump(mem_bvv, f, -1)
    f.close()
    #print "Test case generated run '%s <something>' to execute the test" % sys.argv[0]
def test_pickling():
    """Pickles survive within and across ANA sessions, and fail with the wrong data layer."""
    # set up ANA and make the pickles
    ana.set_dl(ana.DirDataLayer('/tmp/pickletest'))
    make_pickles()
    # make sure the pickles work in the same "session"
    load_pickles()
    # reset ANA, and load the pickles
    ana.set_dl(ana.DirDataLayer('/tmp/pickletest'))
    gc.collect()
    load_pickles()
    # purposefully set the wrong directory to make sure this excepts out
    ana.set_dl(ana.DirDataLayer('/tmp/pickletest2'))
    gc.collect()
    #load_pickles()
    nose.tools.assert_raises(Exception, load_pickles)
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,460
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/forward_analysis.py
|
import networkx
class EntryInfo(object):
    """
    Bookkeeping record for a single analysis entry.

    Entries with an equal key compare (and hash) equal so they can be merged.
    Plain, merged, and widened versions of the entry are tracked in separate
    lists; the `entry` property exposes the most authoritative latest one.
    """
    def __init__(self, key, entry):
        self.key = key
        self.entries = [ entry ]
        self.merged_entries = [ ]
        self.widened_entries = [ ]
        self.narrowing_count = 0
    def __hash__(self):
        # Identity is determined purely by the key.
        return hash(self.key)
    def __eq__(self, o):
        if type(self) != type(o):
            return False
        return self.key == o.key
    def __repr__(self):
        return "<EntryInfo {}>".format(self.key)
    @property
    def entry(self):
        """
        Get the latest available entry.
        Widened entries take precedence over merged ones, which take
        precedence over plain ones.
        :return: The latest available entry.
        """
        if self.widened_entries:
            return self.widened_entries[-1]
        if self.merged_entries:
            return self.merged_entries[-1]
        return self.entries[-1]
    def add_entry(self, entry, merged=False, widened=False):
        """
        Append a new entry to this EntryInfo node.
        :param entry: The new entry to append
        :param bool merged: Whether it is a merged entry or not.
        :param bool widened: Whether it is a widened entry or not.
        """
        if merged:
            bucket = self.merged_entries
        elif widened:
            bucket = self.widened_entries
        else:
            bucket = self.entries
        bucket.append(entry)
class ForwardAnalysis(object):
    """
    Base class for static forward data-flow analyses (CFG, VFG, DDG, etc.).

    ForwardAnalysis traverses the program forward — via the CFG, or the binary
    itself if a CFG is not available — and builds a graph whose nodes carry
    analysis-specific information, usually one node per basic block (SimRun in
    angr terms). See the CFGNode implementation for an example of node content.

    Subclasses drive the analysis by implementing the abstract hooks below
    (_pre_analysis, _get_successors, _handle_successor, ...).

    Feel free to discuss with me (Fish) if you have any suggestion or complaint!
    """

    def __init__(self, order_entries=False, allow_merging=False):
        """
        Constructor

        :param bool order_entries: If True, pending entries are kept sorted by
                                   _entry_sorting_key() and processed in order.
        :param bool allow_merging: If True, entries that share the same key
                                   (_entry_key()) are merged with _merge_entries().
        :return: None
        """
        self._order_entries = order_entries
        self._allow_merging = allow_merging

        # Analysis progress control: set to True by abort()
        self._should_abort = False

        # All remaining entries, wrapped as EntryInfo instances
        self._entries = [ ]

        # A map between entry key to entry. Entries with the same key will be merged by calling _merge_entries()
        # NOTE(review): entries popped from self._entries stay in this map, so a
        # later entry with the same key merges into an EntryInfo that is no
        # longer queued — confirm this is intended.
        self._entries_map = { }

        # The graph! Analysis results (nodes) are stored here
        self._graph = networkx.DiGraph()

    #
    # Properties
    #

    @property
    def should_abort(self):
        """
        Whether the analysis has been asked to terminate.

        :return: True/False
        """
        return self._should_abort

    @property
    def graph(self):
        return self._graph

    @property
    def entries(self):
        # Yield the raw entries, unwrapping the EntryInfo containers.
        for entry_info in self._entries:
            yield entry_info.entry

    #
    # Public methods
    #

    def abort(self):
        """
        Abort the analysis

        :return: None
        """
        self._should_abort = True

    #
    # Abstract interfaces
    #

    def _pre_analysis(self):
        raise NotImplementedError('_pre_analysis() is not implemented.')

    def _intra_analysis(self):
        raise NotImplementedError('_intra_analysis() is not implemented.')

    def _post_analysis(self):
        raise NotImplementedError('_post_analysis() is not implemented.')

    def _entry_key(self, entry):
        raise NotImplementedError('_entry_key() is not implemented.')

    def _get_successors(self, entry):
        raise NotImplementedError('_get_successors() is not implemented.')

    def _pre_entry_handling(self, entry):
        raise NotImplementedError('_pre_entry_handling() is not implemented.')

    def _post_entry_handling(self, entry, new_entries, successors):
        raise NotImplementedError('_post_entry_handling() is not implemented.')

    def _handle_successor(self, entry, successor, successors):
        raise NotImplementedError('_handle_successor() is not implemented.')

    def _entry_list_empty(self):
        raise NotImplementedError('_entry_list_empty() is not implemented.')

    def _merge_entries(self, *entries):
        raise NotImplementedError('_merge_entries() is not implemented.')

    def _widen_entries(self, *entries):
        raise NotImplementedError('_widen_entries() is not implemented.')

    def _entry_sorting_key(self, entry):
        raise NotImplementedError('_entry_sorting_key() is not implemented.')

    #
    # Private methods
    #

    def _analyze(self):
        """
        The main analysis routine.

        :return: None
        """
        self._pre_analysis()

        if not self._entries:
            self._entry_list_empty()

        while not self.should_abort and self._entries:
            # pop(0) removes the head in place; the original
            # `self._entries = self._entries[1:]` copied the whole list
            # on every iteration (accidental O(n^2) over the run).
            entry_info = self._entries.pop(0)

            self._handle_entry(entry_info)

            # Short-cut for aborting the analysis
            if self.should_abort:
                break

            self._intra_analysis()

            if not self._entries:
                self._entry_list_empty()

        self._post_analysis()

    def _handle_entry(self, entry_info):
        """
        Process an entry, get all successors, and call _handle_successor() to handle each successor.

        :param EntryInfo entry_info: The EntryInfo instance to process.
        :return: None
        """
        entry = entry_info.entry

        try:
            self._pre_entry_handling(entry)
        except AngrSkipEntryNotice:
            # The subclass asked us to skip this entry entirely.
            return

        successors = self._get_successors(entry)

        all_new_entries = [ ]

        for successor in successors:
            new_entries = self._handle_successor(entry, successor, successors)

            if new_entries:
                all_new_entries.extend(new_entries)

                for new_entry in new_entries:
                    self._insert_entry(new_entry)

        self._post_entry_handling(entry, all_new_entries, successors)

    def _insert_entry(self, entry):
        """
        Insert a new entry into the entry list. If the entry list is ordered, this entry will be inserted at the
        correct position.

        :param entry: The entry to insert
        :return: None
        """
        if self._allow_merging:
            key = self._entry_key(entry)

            if key in self._entries_map:
                entry_info = self._entries_map[key]

                try:
                    merged_entry = self._merge_entries(entry_info.entry, entry)
                    entry_info.add_entry(merged_entry, merged=True)

                except AngrJobMergingFailureNotice:
                    # merging failed
                    entry_info = EntryInfo(key, entry)
                    # update the entries map
                    self._entries_map[key] = entry_info

                # The existing EntryInfo is already queued; returning here
                # keeps it from being inserted into self._entries a second
                # time (which would process the merged entry twice).
                return

            else:
                entry_info = EntryInfo(key, entry)
                self._entries_map[key] = entry_info

        else:
            key = self._entry_key(entry)
            entry_info = EntryInfo(key, entry)

        if self._order_entries:
            self._binary_insert(self._entries, entry_info, lambda elem: self._entry_sorting_key(elem.entry))
        else:
            self._entries.append(entry_info)

    def _peek_entry(self, pos):
        """
        Return the entry currently at position `pos`, but still keep it in the entry list. An IndexError will be raised
        if that position does not currently exist in the entry list.

        :param int pos: Position of the entry to get.
        :return: The entry
        """
        if pos < len(self._entries):
            return self._entries[pos].entry

        raise IndexError()

    #
    # Utils
    #

    @staticmethod
    def _binary_insert(lst, elem, key, lo=0, hi=None):
        """
        Insert an element into a sorted list, and keep the list sorted.

        The major difference from bisect.bisect_left is that this function supports a key method, so user doesn't have
        to create the key array for each insertion.

        :param list lst: The list. Must be pre-ordered.
        :param object elem: An element to insert into the list.
        :param func key: A method to get the key for each element in the list.
        :param int lo: Lower bound of the search.
        :param int hi: Upper bound of the search.
        :return: None
        """
        if lo < 0:
            raise ValueError("lo must be a non-negative number")

        if hi is None:
            hi = len(lst)

        while lo < hi:
            mid = (lo + hi) // 2
            if key(lst[mid]) < key(elem):
                lo = mid + 1
            else:
                hi = mid

        lst.insert(lo, elem)
from ..errors import AngrSkipEntryNotice, AngrJobMergingFailureNotice
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,461
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/__init__.py
|
from .cfg_fast import CFGFast
from .cfg_accurate import CFGAccurate
from .cdg import CDG
from .ddg import DDG
from .vfg import VFG
from .boyscout import BoyScout
from .girlscout import GirlScout
from .backward_slice import BackwardSlice
from .veritesting import Veritesting
from .vsa_ddg import VSA_DDG
from .bindiff import BinDiff
from .dfg import DFG
from .loopfinder import LoopFinder
from .cfg import CFG
from .congruency_check import CongruencyCheck
from .cfg_arch_options import CFGArchOptions
from .static_hooker import StaticHooker
from .reassembler import Reassembler
from .binary_optimizer import BinaryOptimizer
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,462
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/exploration_techniques/dfs.py
|
from . import ExplorationTechnique
class DFS(ExplorationTechnique):
    """
    Depth-first search exploration.

    Keeps exactly one path active at a time; every other path is parked in
    the 'deferred' stash. When the active stash runs dry, the deepest
    deferred path (the one with the longest trace) is promoted back.
    """

    def setup(self, pg):
        # Make sure the stash we park paths in exists.
        if 'deferred' not in pg.stashes:
            pg.stashes['deferred'] = []

    def step(self, pg, stash, **kwargs):
        pg = pg.step(stash=stash, **kwargs)

        active = pg.stashes[stash]

        # Park everything except the first path.
        if len(active) > 1:
            pg.stashes['deferred'].extend(active[1:])
            del active[1:]

        if len(active) == 0:
            deferred = pg.stashes['deferred']
            if len(deferred) == 0:
                return pg
            # Promote the deferred path with the longest trace.
            idx, deepest = max(enumerate(deferred), key=lambda pair: len(pair[1].trace))
            deferred.pop(idx)
            active.append(deepest)

        return pg
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,463
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/__init__.py
|
""" angr module """
# pylint: disable=wildcard-import
import logging
# Attach a NullHandler to the package logger so importing angr does not
# emit "No handlers could be found" warnings when the host application
# has not configured logging.
logging.getLogger("angr").addHandler(logging.NullHandler())
from .project import *
from .regmap import *
from .path import *
from .errors import *
from .surveyor import *
from .service import *
from .analyses import *
from .analysis import *
from .tablespecs import *
from . import surveyors
from .blade import Blade
from .simos import SimOS
from .path_group import PathGroup
from .surveyors.caller import Callable
from .log import Loggers
from . import knowledge
from . import exploration_techniques
import sys

# Walk up the call stack to decide whether to install the default loggers:
# if angr was imported from a script or an interactive console ('__main__' /
# '__console__'), install them; if it was imported by nose, don't.
i = 0
module = None  # pre-bind so the `del` below is safe even if the loop exits early
try:
    while True:
        i += 1
        module = sys._getframe(i).f_globals.get('__name__')
        if module == '__main__' or module == '__console__':
            loggers = Loggers()
            break
        elif module is not None and module.startswith('nose.'):
            break
except ValueError:
    # sys._getframe raises ValueError once `i` exceeds the call-stack depth;
    # the original code crashed here when neither marker frame was found.
    pass
del sys, i, module
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,464
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_echo.py
|
import angr
import logging
# Test-suite logger (named `l` following the angr tests' convention).
l = logging.getLogger("angr.tests")
import os
# Path to the shared test-binaries checkout, relative to this file.
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
# Architectures to run the echo test against; all but x86_64 are
# currently commented out (disabled).
target_arches = {
    #'i386',
    'x86_64',
    #'ppc',
    #'armel',
    #'mips',
}
def run_echo_haha(arch):
    """Symbolically execute `echo haha` for *arch* and check it prints 'haha\\n'."""
    binary = os.path.join(test_location, arch, 'echo')
    project = angr.Project(binary, use_sim_procedures=False)
    init_state = project.factory.full_init_state(mode='symbolic_approximating', args=['echo', 'haha'])
    path_group = project.factory.path_group(init_state)

    # Step until the single active path terminates.
    path_group.step(until=lambda lpg: len(lpg.active) != 1)

    assert len(path_group.deadended) == 1
    assert len(path_group.active) == 0
    # stdout (fd 1) of the finished path must be exactly the echoed string.
    assert path_group.deadended[0].state.posix.dumps(1) == 'haha\n'
def test_echo_haha():
    """Nose test generator: yield one echo check per target architecture."""
    for target in target_arches:
        yield run_echo_haha, target
if __name__ == "__main__":
for r,a in test_echo_haha():
r(a)
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,465
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/perf_unicorn.py
|
import sys
import os
import time
import angr
import simuvex.s_options as so
import nose.tools
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../'))
def perf_unicorn_0():
    """Time a full path-group run of the perf_unicorn_0 binary with unicorn options enabled."""
    p = angr.Project(os.path.join(test_location, 'binaries', 'tests', 'x86_64', 'perf_unicorn_0'))

    s_unicorn = p.factory.entry_state(add_options=so.unicorn | {so.STRICT_PAGE_ACCESS}, remove_options={so.LAZY_SOLVES}) # unicorn
    pg_unicorn = p.factory.path_group(s_unicorn)

    start = time.time()
    pg_unicorn.run()
    elapsed = time.time() - start

    # print(single_arg) parses identically under Python 2 and 3, unlike the
    # original bare `print` statements (Python-2-only).
    print("Elapsed %f sec" % elapsed)
    print(pg_unicorn.one_deadended)
def perf_unicorn_1():
    """Time a full path-group run of the perf_unicorn_1 binary with unicorn options enabled."""
    p = angr.Project(os.path.join(test_location, 'binaries', 'tests', 'x86_64', 'perf_unicorn_1'))

    s_unicorn = p.factory.entry_state(add_options=so.unicorn | {so.STRICT_PAGE_ACCESS}, remove_options={so.LAZY_SOLVES}) # unicorn
    pg_unicorn = p.factory.path_group(s_unicorn)

    start = time.time()
    pg_unicorn.run()
    elapsed = time.time() - start

    # print(single_arg) parses identically under Python 2 and 3, unlike the
    # original bare `print` statements (Python-2-only).
    print("Elapsed %f sec" % elapsed)
    print(pg_unicorn.one_deadended)
if __name__ == "__main__":
if len(sys.argv) > 1:
for arg in sys.argv[1:]:
print 'perf_' + arg
globals()['perf_' + arg]()
else:
for fk, fv in globals().items():
if fk.startswith('perf_') and callable(fv):
print fk
res = fv()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,466
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_explorer.py
|
import nose
import angr
import os
# Path to the shared test-binaries checkout, relative to this test file.
location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))


def test_xpl():
    """Explore the x86_64 'all' binary until four paths reach printf's PLT stub."""
    project = angr.Project(os.path.join(location, "x86_64/all"))

    # The PLT stub address for printf is fixed for this test binary.
    stub_addr = project.loader.main_bin.get_call_stub_addr("printf")
    nose.tools.assert_equal(stub_addr, 0x400560)

    explorer = project.surveyors.Explorer(find=(0x400560,), num_find=4)
    explorer.run()
    nose.tools.assert_equal(len(explorer.found), 4)


if __name__ == '__main__':
    test_xpl()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,467
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/regmap.py
|
class RegisterMap(object):
    """
    A mapping from aligned register offsets to expressions.

    Offsets must be multiples of the architecture's general-purpose register
    width; unaligned accesses raise an ``Exception`` (or, for ``contains``, an
    ``AssertionError`` — kept as-is for backward compatibility).
    """

    def __init__(self, arch):
        """
        :param arch: Architecture descriptor; only its ``bytes`` attribute
                     (general register width in bytes) is used.
        """
        self._reg_map = {}
        # TODO: Properly set the width of a register
        self._general_register_width = arch.bytes

    def _check_aligned(self, reg_offset):
        # Shared alignment check so every method reports the offending offset
        # consistently (previously get/remove omitted it).
        if reg_offset % self._general_register_width != 0:
            raise Exception("The offset %d is not aligned." % reg_offset)

    def assign(self, reg_offset, expr):
        """Store ``expr`` as the value of the register at ``reg_offset``."""
        self._check_aligned(reg_offset)
        self._reg_map[reg_offset] = expr

    def contains(self, reg_offset):
        """Return True if a value has been assigned at ``reg_offset``."""
        # TODO: Support unaligned offsets
        # NOTE: kept as an assert (AssertionError) so existing callers that
        # catch AssertionError are unaffected.
        assert reg_offset % self._general_register_width == 0
        return reg_offset in self._reg_map

    def get(self, reg_offset):
        """Return the expression at ``reg_offset``, or None if unassigned."""
        self._check_aligned(reg_offset)
        # Single dict lookup instead of the previous contains-then-index pair.
        return self._reg_map.get(reg_offset)

    def remove(self, reg_offset):
        """
        Forget the assignment at ``reg_offset``.

        Raises KeyError if nothing was assigned there (unchanged behavior).
        """
        self._check_aligned(reg_offset)
        del self._reg_map[reg_offset]
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,468
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/exploration_techniques/veritesting.py
|
from . import ExplorationTechnique
class Veritesting(ExplorationTechnique):
    """
    Enable veritesting. This technique, described in a paper[1] from CMU, attempts to address the problem of state
    explosions in loops by performing smart merging.

    [1] https://users.ece.cmu.edu/~aavgerin/papers/veritesting-icse-2014.pdf
    """

    def __init__(self, **options):
        super(Veritesting, self).__init__()
        # Options are forwarded verbatim to the Veritesting analysis.
        self.options = options

    def step_path(self, path):
        vt = self.project.analyses.Veritesting(path, **self.options)
        if not (vt.result and vt.final_path_group):
            # Analysis failed or produced nothing; fall back to default stepping.
            return None
        pg = vt.final_path_group
        # Both deviated and successful paths continue as active paths.
        for stash_name in ('deviated', 'successful'):
            pg.stash(from_stash=stash_name, to_stash='active')
        return pg.active, pg.stashes.get('unconstrained', []), pg.stashes.get('unsat', []), [], []
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,469
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/ddg.py
|
import logging
import re
from collections import defaultdict
import networkx
import pyvex
from simuvex import SimRegisterVariable, SimMemoryVariable, SimTemporaryVariable, SimConstantVariable, SimStackVariable
from simuvex import SimSolverModeError, SimUnsatError
from ..errors import AngrDDGError
from ..analysis import Analysis, register_analysis
from .code_location import CodeLocation
l = logging.getLogger("angr.analyses.ddg")
class ProgramVariable(object):
    """
    Describes a variable in the program at a specific location.

    :ivar SimVariable variable: The variable.
    :ivar CodeLocation location: Location of the variable.
    """

    def __init__(self, variable, location, initial=False):
        self.variable = variable
        self.location = location
        # Marks definitions synthesized for values live on entry (no prior def seen).
        self.initial = initial

    def __hash__(self):
        # Keyed on (variable, location) only, matching __eq__; 'initial' is
        # deliberately excluded from identity.
        return hash((self.variable, self.location))

    def __eq__(self, other):
        # Only other ProgramVariable instances can compare equal.
        if isinstance(other, ProgramVariable):
            return self.variable == other.variable and self.location == other.location
        return False

    def __repr__(self):
        return "<%s @ %s>" % (self.variable, self.location)
class DDGJob(object):
    """
    A single worklist entry: a CFG node paired with the call depth at which
    the DDG analysis reached it.
    """

    def __init__(self, cfg_node, call_depth):
        self.cfg_node, self.call_depth = cfg_node, call_depth

    def __repr__(self):
        return "<DDGJob %s, call_depth %d>" % (self.cfg_node, self.call_depth)
class LiveDefinitions(object):
    """
    A collection of live definitions with some handy interfaces for definition
    killing and lookups.

    Definitions are tracked at byte granularity: one map per register offset,
    one per memory address, each holding the set of CodeLocations that last
    defined that byte, plus a per-SimVariable map of defining locations.

    NOTE: written for Python 2 — ``iteritems``/``iterkeys`` and integer
    division on ``variable.size / 8`` below.
    """

    def __init__(self):
        """
        Constructor.
        """
        # byte-to-byte mappings
        # TODO: make it copy-on-write in order to save memory.
        # TODO: options are either cooldict.COWDict or a modified version of simuvex.SimPagedMemory
        self._memory_map = defaultdict(set)      # memory address -> set of defining CodeLocations
        self._register_map = defaultdict(set)    # register offset -> set of defining CodeLocations
        self._defs = defaultdict(set)            # SimVariable -> set of defining CodeLocations

    #
    # Overridden methods
    #

    def __contains__(self, variable):
        # Membership keys on the whole SimVariable, not on individual bytes.
        return variable in self._defs

    #
    # Public methods
    #

    def branch(self):
        """
        Create a branch of the current live definition collection.

        NOTE(review): currently identical to copy(); kept as a separate entry
        point so a future copy-on-write implementation can make branching cheap.

        :return: A new LiveDefinition instance.
        :rtype: LiveDefinitions
        """
        ld = LiveDefinitions()
        # Shallow copies: the per-key location sets are shared with self.
        ld._memory_map = self._memory_map.copy()
        ld._register_map = self._register_map.copy()
        ld._defs = self._defs.copy()
        return ld

    def copy(self):
        """
        Make a hard copy of `self`.

        :return: A new LiveDefinition instance.
        :rtype: LiveDefinitions
        """
        ld = LiveDefinitions()
        # Shallow copies: the per-key location sets are shared with self.
        ld._memory_map = self._memory_map.copy()
        ld._register_map = self._register_map.copy()
        ld._defs = self._defs.copy()
        return ld

    def add_def(self, variable, location, size_threshold=32):
        """
        Add a new definition of variable.

        :param SimVariable variable: The variable being defined.
        :param CodeLocation location: Location of the variable being defined.
        :param int size_threshold: The maximum bytes to consider for the variable.
        :return: True if the definition was new, False otherwise
        :rtype: bool
        """
        new_defs_added = False

        if isinstance(variable, SimRegisterVariable):
            if variable.reg is None:
                l.warning('add_def: Got a None for a SimRegisterVariable. Consider fixing.')
                return new_defs_added

            # variable.size is presumably in bits — TODO confirm; capped at
            # size_threshold bytes. Python 2 integer division assumed.
            size = min(variable.size / 8, size_threshold)
            offset = variable.reg
            # Record the definition for every byte the register spans.
            while offset < variable.reg + size:
                if location not in self._register_map[offset]:
                    new_defs_added = True
                    self._register_map[offset].add(location)
                offset += 1

            self._defs[variable].add(location)

        elif isinstance(variable, SimMemoryVariable):
            size = min(variable.size / 8, size_threshold)
            offset = variable.addr
            # Record the definition for every byte the memory variable spans.
            while offset < variable.addr + size:
                if location not in self._memory_map[offset]:
                    new_defs_added = True
                    self._memory_map[offset].add(location)
                offset += 1

            self._defs[variable].add(location)

        else:
            l.error('Unsupported variable type "%s".', type(variable))

        return new_defs_added

    def add_defs(self, variable, locations, size_threshold=32):
        """
        Add a collection of new definitions of a variable.

        :param SimVariable variable: The variable being defined.
        :param iterable locations: A collection of locations where the variable was defined.
        :param int size_threshold: The maximum bytes to consider for the variable.
        :return: True if any of the definition was new, False otherwise
        :rtype: bool
        """
        new_defs_added = False
        for loc in locations:
            new_defs_added |= self.add_def(variable, loc, size_threshold=size_threshold)
        return new_defs_added

    def kill_def(self, variable, location, size_threshold=32):
        """
        Add a new definition for variable and kill all previous definitions.

        :param SimVariable variable: The variable to kill.
        :param CodeLocation location: The location where this variable is defined.
        :param int size_threshold: The maximum bytes to consider for the variable.
        :return: None
        """
        if isinstance(variable, SimRegisterVariable):
            if variable.reg is None:
                l.warning('kill_def: Got a None for a SimRegisterVariable. Consider fixing.')
                return None

            size = min(variable.size / 8, size_threshold)
            offset = variable.reg
            # Overwrite (not add to) each byte's definition set: prior defs die.
            while offset < variable.reg + size:
                self._register_map[offset] = { location }
                offset += 1

            self._defs[variable] = { location }

        elif isinstance(variable, SimMemoryVariable):
            size = min(variable.size / 8, size_threshold)
            offset = variable.addr
            # Overwrite (not add to) each byte's definition set: prior defs die.
            while offset < variable.addr + size:
                self._memory_map[offset] = { location }
                offset += 1

            self._defs[variable] = { location }

        else:
            l.error('Unsupported variable type "%s".', type(variable))

    def lookup_defs(self, variable, size_threshold=32):
        """
        Find all definitions of the variable.

        :param SimVariable variable: The variable to lookup for.
        :param int size_threshold: The maximum bytes to consider for the variable. For example, if the variable is 100
                                   byte long, only the first `size_threshold` bytes are considered.
        :return: A set of code locations where the variable is defined.
        :rtype: set
        """
        live_def_locs = set()

        if isinstance(variable, SimRegisterVariable):
            if variable.reg is None:
                l.warning('lookup_defs: Got a None for a SimRegisterVariable. Consider fixing.')
                return live_def_locs

            size = min(variable.size / 8, size_threshold)
            offset = variable.reg
            # Union the defining locations of every byte the variable spans.
            while offset < variable.reg + size:
                if offset in self._register_map:
                    live_def_locs |= self._register_map[offset]
                offset += 1

        elif isinstance(variable, SimMemoryVariable):
            size = min(variable.size / 8, size_threshold)
            offset = variable.addr
            # Union the defining locations of every byte the variable spans.
            while offset < variable.addr + size:
                if offset in self._memory_map:
                    live_def_locs |= self._memory_map[offset]
                offset += 1

        else:
            # umm unsupported variable type
            l.error('Unsupported variable type "%s".', type(variable))

        return live_def_locs

    def iteritems(self):
        """
        An iterator that returns all live definitions.

        :return: The iterator.
        :rtype: iter
        """
        return self._defs.iteritems()

    def itervariables(self):
        """
        An iterator that returns all live variables.

        :return: The iterator.
        :rtype: iter
        """
        return self._defs.iterkeys()
class DDG(Analysis):
"""
This is a fast data dependence graph directly generated from our CFG analysis result. The only reason for its
existence is the speed. There is zero guarantee for being sound or accurate. You are supposed to use it only when
you want to track the simplest data dependence, and you do not care about soundness or accuracy.
For a better data dependence graph, please consider performing a better static analysis first (like Value-set
Analysis), and then construct a dependence graph on top of the analysis result (for example, the VFG in angr).
Also note that since we are using states from CFG, any improvement in analysis performed on CFG (like a points-to
analysis) will directly benefit the DDG.
"""
def __init__(self, cfg, start=None, call_depth=None):
"""
:param cfg: Control flow graph. Please make sure each node has an associated `state` with it. You may
want to generate your CFG with `keep_state=True`.
:param start: An address, Specifies where we start the generation of this data dependence graph.
:param call_depth: None or integers. A non-negative integer specifies how deep we would like to track in the
call tree. None disables call_depth limit.
"""
# Sanity check
if not cfg._keep_state:
raise AngrDDGError('CFG must have "keep_state" set to True.')
self._cfg = cfg
self._start = self.project.entry if start is None else start
self._call_depth = call_depth
self._stmt_graph = networkx.DiGraph()
self._data_graph = networkx.DiGraph()
self._simplified_data_graph = None
self._symbolic_mem_ops = set()
# Data dependency graph per function
self._function_data_dependencies = None
# Begin construction!
self._construct()
#
# Properties
#
@property
def graph(self):
"""
:returns: A networkx DiGraph instance representing the dependence relations between statements.
:rtype: networkx.DiGraph
"""
return self._stmt_graph
@property
def data_graph(self):
"""
Get the data dependence graph.
:return: A networkx DiGraph instance representing data dependence.
:rtype: networkx.DiGraph
"""
return self._data_graph
@property
def simplified_data_graph(self):
"""
:return:
"""
if self._simplified_data_graph is None:
self._simplified_data_graph = self._simplify_data_graph(self.data_graph)
return self._simplified_data_graph
#
# Public methods
#
def pp(self):
"""
Pretty printing.
"""
# TODO: make it prettier
for src, dst, data in self.graph.edges_iter(data=True):
print "%s <-- %s, %s" % (src, dst, data)
def dbg_repr(self):
"""
Representation for debugging.
"""
# TODO:
return str(self.graph)
def __contains__(self, code_location):
"""
Returns whether `code_location` is in the graph.
:param code_location: A CodeLocation instance.
:returns: True/False
"""
return code_location in self.graph
def get_predecessors(self, code_location):
"""
Returns all predecessors of the code location.
:param code_location: A CodeLocation instance.
:returns: A list of all predecessors.
"""
return self.graph.predecessors(code_location)
def function_dependency_graph(self, func):
"""
Get a dependency graph for the function `func`.
:param func: The Function object in CFG.function_manager.
:returns: A networkx.DiGraph instance.
"""
if self._function_data_dependencies is None:
self._build_function_dependency_graphs()
if func in self._function_data_dependencies:
return self._function_data_dependencies[func]
# Not found
return None
def data_sub_graph(self, pv, simplified=True, killing_edges=False, excluding_types=None):
"""
Get a subgraph from the data graph or the simplified data graph that starts from node pv.
:param ProgramVariable pv: The starting point of the subgraph.
:param bool simplified: When True, the simplified data graph is used, otherwise the data graph is used.
:param bool killing_edges: Are killing edges included or not.
:param iterable excluding_types: Excluding edges whose types are among those excluded types.
:return: A subgraph.
:rtype: networkx.MultiDiGraph
"""
result = networkx.MultiDiGraph()
result.add_node(pv)
base_graph = self.simplified_data_graph if simplified else self.data_graph
if pv not in base_graph:
return result
# traverse all edges and add them to the result graph if needed
queue = [ pv ]
traversed = set()
while queue:
elem = queue[0]
queue = queue[1:]
if elem in traversed:
continue
traversed.add(elem)
out_edges = base_graph.out_edges(elem, data=True)
if not killing_edges:
# remove killing edges
out_edges = [ (a, b, data) for a, b, data in out_edges if 'type' not in data or data['type'] != 'kill']
if excluding_types:
out_edges = [ (a, b, data) for a, b, data in out_edges if
'type' not in data or data['type'] not in excluding_types
]
for src, dst, data in out_edges:
result.add_edge(src, dst, **data)
if dst not in traversed:
queue.append(dst)
return result
#
# Private methods
#
def _construct(self):
"""
Construct the data dependence graph.
We track the following types of dependence:
- (Intra-IRSB) temporary variable dependencies
- Register dependencies
- Memory dependencies, although it's very limited. See below.
We track the following types of memory access:
- (Intra-functional) Stack read/write.
Trace changes of stack pointers inside a function, and the dereferences of stack pointers.
- (Inter-functional) Stack read/write.
- (Global) Static memory positions.
Keep a map of all accessible memory positions to their source statements per function. After that, we
traverse the CFG and link each pair of reads/writes together in the order of control-flow.
We do not track the following types of memory access
- Symbolic memory access
Well, they cannot be tracked under fastpath mode (which is the mode we are generating the CTF) anyways.
"""
worklist = []
worklist_set = set()
# initial nodes are those nodes in CFG that has no in-degrees
for n in self._cfg.graph.nodes_iter():
if self._cfg.graph.in_degree(n) == 0:
# Put it into the worklist
job = DDGJob(n, 0)
self._worklist_append(job, worklist, worklist_set)
# A dict storing defs set
# DDGJob -> LiveDefinition
live_defs_per_node = {}
while worklist:
# Pop out a node
ddg_job = worklist[0]
node, call_depth = ddg_job.cfg_node, ddg_job.call_depth
worklist = worklist[ 1 : ]
worklist_set.remove(node)
# Grab all final states. There are usually more than one (one state for each successor), and we gotta
# process all of them
final_states = node.final_states
if node in live_defs_per_node:
live_defs = live_defs_per_node[node]
else:
live_defs = LiveDefinitions()
live_defs_per_node[node] = live_defs
successing_nodes = self._cfg.graph.successors(node)
for state in final_states:
if state.scratch.jumpkind == 'Ijk_FakeRet' and len(final_states) > 1:
# Skip fakerets if there are other control flow transitions available
continue
new_call_depth = call_depth
if state.scratch.jumpkind == 'Ijk_Call':
new_call_depth += 1
elif state.scratch.jumpkind == 'Ijk_Ret':
new_call_depth -= 1
if self._call_depth is not None and call_depth > self._call_depth:
l.debug('Do not trace into %s due to the call depth limit', state.ip)
continue
new_defs = self._track(state, live_defs, node.irsb.statements if node.irsb is not None else None)
#corresponding_successors = [n for n in successing_nodes if
# not state.ip.symbolic and n.addr == state.se.any_int(state.ip)]
#if not corresponding_successors:
# continue
changed = False
for successing_node in successing_nodes:
if (state.scratch.jumpkind == 'Ijk_Call' or state.scratch.jumpkind.startswith('Ijk_Sys')) and \
(state.ip.symbolic or successing_node.addr != state.se.any_int(state.ip)):
# this might be the block after the call, and we are not tracing into the call
# TODO: make definition killing architecture independent and calling convention independent
filtered_defs = LiveDefinitions()
for variable, locs in new_defs.iteritems():
if isinstance(variable, SimRegisterVariable):
if variable.reg in (self.project.arch.registers['eax'][0],
self.project.arch.registers['ecx'][0],
self.project.arch.registers['edx'][0]):
continue
filtered_defs.add_defs(variable, locs)
new_defs = filtered_defs
if successing_node in live_defs_per_node:
defs_for_next_node = live_defs_per_node[successing_node]
else:
defs_for_next_node = LiveDefinitions()
live_defs_per_node[successing_node] = defs_for_next_node
for var, code_loc_set in new_defs.iteritems():
l.debug("Adding %d new defitions for variable %s.", len(code_loc_set), var)
changed |= defs_for_next_node.add_defs(var, code_loc_set)
if changed:
if (self._call_depth is None) or \
(self._call_depth is not None and 0 <= new_call_depth <= self._call_depth):
# Put all reachable successors back to our worklist again
for successing_node in successing_nodes:
nw = DDGJob(successing_node, new_call_depth)
self._worklist_append(nw, worklist, worklist_set)
def _track(self, state, live_defs, statements):
    """
    Given all live definitions prior to this program point, track the changes, and return a new list of live
    definitions. We scan through the action list of the new state to track the changes.

    :param state:           The input state at that program point.
    :param live_defs:       A list of all live definitions prior to reaching this program point.
    :param list statements: A list of VEX statements.
    :returns:               A list of new live definitions.
    """

    # Make a copy of live_defs so the caller's collection is not mutated
    live_defs = live_defs.copy()

    action_list = list(state.log.actions)

    # Since all temporary variables are local, we simply track them in a local dict
    temp_defs = { }
    temp_variables = { }
    # tmp -> (sort, offset) for tmps that are known to hold an sp/bp-relative value
    temp_register_symbols = { }

    # All dependence edges are added to the graph either at the end of this method, or when they are going to be
    # overwritten by a new edge. This is because we sometimes have to modify a previous edge (e.g. add new labels
    # to the edge)
    temps_to_edges = defaultdict(list)
    regs_to_edges = defaultdict(list)

    last_statement_id = None
    pv_read = None  # program variables read out in the same statement. we keep a copy of those variables here so
                    # we can link it to the tmp_write action right afterwards
    data_generated = None

    # tracks stack pointer and base pointer
    #sp = state.se.any_int(state.regs.sp) if not state.regs.sp.symbolic else None
    #bp = state.se.any_int(state.regs.bp) if not state.regs.bp.symbolic else None

    for a in action_list:
        # Reset the per-statement tracking state whenever we move to a new statement
        if last_statement_id is None or last_statement_id != a.stmt_idx:
            pv_read = [ ]
            data_generated = None
            last_statement_id = a.stmt_idx

        # Actions coming from a SimProcedure carry no basic-block address
        if a.bbl_addr is None:
            current_code_location = CodeLocation(None, None, sim_procedure=a.sim_procedure)
        else:
            current_code_location = CodeLocation(a.bbl_addr, a.stmt_idx, ins_addr=a.ins_addr)

        if a.type == "mem":
            if a.actual_addrs is None:
                # For now, mem reads don't necessarily have actual_addrs set properly
                try:
                    addr_list = { state.se.any_int(a.addr.ast) }
                except (SimSolverModeError, SimUnsatError, ZeroDivisionError):
                    # FIXME: ZeroDivisionError should have been caught by claripy and simuvex.
                    # FIXME: see claripy issue #75. this is just a temporary workaround.
                    # it's symbolic... just continue
                    addr_list = { 0x60000000 }  # TODO: this is a random address that I pick. Fix it.
            else:
                addr_list = set(a.actual_addrs)

            for addr in addr_list:
                variable = None
                # If the address was computed from a single tmp that holds an sp/bp-relative
                # value, we can model this access as a stack variable instead of a raw address.
                if len(addr_list) == 1 and len(a.addr.tmp_deps) == 1:
                    addr_tmp = list(a.addr.tmp_deps)[0]
                    if addr_tmp in temp_register_symbols:
                        # it must be a stack variable
                        sort, offset = temp_register_symbols[addr_tmp]
                        variable = SimStackVariable(offset, a.data.ast.size() / 8, base=sort, base_addr=addr - offset)

                if variable is None:
                    variable = SimMemoryVariable(addr, a.data.ast.size() / 8)  # TODO: Properly unpack the SAO

                pvs = [ ]

                if a.action == "read":
                    # Create an edge between def site and use site
                    prevdefs = self._def_lookup(live_defs, variable)

                    # TODO: prevdefs should only contain location, not labels
                    for prev_code_loc, labels in prevdefs.iteritems():
                        self._stmt_graph_add_edge(prev_code_loc, current_code_location, **labels)

                        pvs.append(ProgramVariable(variable, prev_code_loc))

                    if not pvs:
                        # No previous definition: treat this as an initial (external) value
                        pvs.append(ProgramVariable(variable, current_code_location, initial=True))
                        # make sure to put it into the killing set
                        self._kill(live_defs, variable, current_code_location)

                    for pv in pvs:
                        pv_read.append(pv)

                if a.action == "write":
                    # Kill the existing live def
                    self._kill(live_defs, variable, current_code_location)

                    pvs.append(ProgramVariable(variable, current_code_location))

                for pv in pvs:
                    # For each of its register dependency and data dependency, we annotate the corresponding edge
                    for reg_offset in a.addr.reg_deps:
                        self._stmt_graph_annotate_edges(regs_to_edges[reg_offset], subtype='mem_addr')

                        reg_variable = SimRegisterVariable(reg_offset, self._get_register_size(reg_offset))
                        prev_defs = self._def_lookup(live_defs, reg_variable)
                        for loc, _ in prev_defs.iteritems():
                            v = ProgramVariable(reg_variable, loc)
                            self._data_graph_add_edge(v, pv, type='mem_addr')

                    for tmp in a.addr.tmp_deps:
                        self._stmt_graph_annotate_edges(temps_to_edges[tmp], subtype='mem_addr')

                        if tmp in temp_variables:
                            self._data_graph_add_edge(temp_variables[tmp], pv, type='mem_addr')

                    for reg_offset in a.data.reg_deps:
                        self._stmt_graph_annotate_edges(regs_to_edges[reg_offset], subtype='mem_data')

                        reg_variable = SimRegisterVariable(reg_offset, self._get_register_size(reg_offset))
                        prev_defs = self._def_lookup(live_defs, reg_variable)
                        for loc, _ in prev_defs.iteritems():
                            v = ProgramVariable(reg_variable, loc)
                            self._data_graph_add_edge(v, pv, type='mem_data')

                    for tmp in a.data.tmp_deps:
                        self._stmt_graph_annotate_edges(temps_to_edges[tmp], subtype='mem_data')

                        if tmp in temp_variables:
                            self._data_graph_add_edge(temp_variables[tmp], pv, type='mem_data')

        elif a.type == 'reg':
            # TODO: Support symbolic register offsets
            reg_offset = a.offset
            variable = SimRegisterVariable(reg_offset, a.data.ast.size())

            if a.action == 'read':
                # What do we want to do?
                prevdefs = self._def_lookup(live_defs, variable)

                # add edges to the statement dependence graph
                for prev_code_loc, labels in prevdefs.iteritems():
                    self._stmt_graph_add_edge(prev_code_loc, current_code_location, **labels)
                    # record the edge
                    edge_tuple = (prev_code_loc, current_code_location)
                    regs_to_edges[reg_offset].append(edge_tuple)

                    pv_read.append(ProgramVariable(variable, prev_code_loc))

                if not prevdefs:
                    # the register was never defined before - it must be passed in as an argument
                    pv_read.append(ProgramVariable(variable, current_code_location, initial=True))
                    # make sure to put it into the killing set
                    self._kill(live_defs, variable, current_code_location)

                # Remember that this statement produced an sp/bp-relative value, so later tmp
                # writes can be classified as stack addresses.
                if reg_offset == self.project.arch.sp_offset:
                    data_generated = ('sp', 0)
                elif reg_offset == self.project.arch.bp_offset:
                    data_generated = ('bp', 0)

            else:
                # write
                self._kill(live_defs, variable, current_code_location)

                if reg_offset in regs_to_edges:
                    # clear the existing edges definition
                    del regs_to_edges[reg_offset]

                # add a node on the data dependence graph
                pv = ProgramVariable(variable, current_code_location)
                self._data_graph_add_node(pv)

                if not a.reg_deps and not a.tmp_deps:
                    # moving a constant into the register
                    # try to parse out the constant from statement
                    const_variable = SimConstantVariable()
                    if statements is not None:
                        stmt = statements[a.stmt_idx]
                        if isinstance(stmt.data, pyvex.IRExpr.Const):
                            const_variable = SimConstantVariable(value=stmt.data.con.value)
                    const_pv = ProgramVariable(const_variable, current_code_location)
                    self._data_graph_add_edge(const_pv, pv)

                for tmp in a.tmp_deps:
                    if tmp in temp_variables:
                        self._data_graph_add_edge(temp_variables[tmp], pv)

        elif a.type == 'tmp':
            # tmp is definitely not symbolic
            tmp = a.tmp
            pv = ProgramVariable(SimTemporaryVariable(tmp), current_code_location)

            if a.action == 'read':
                prev_code_loc = temp_defs[tmp]

                self._stmt_graph_add_edge(prev_code_loc, current_code_location, type='tmp', data=a.tmp)
                # record the edge
                edge_tuple = (prev_code_loc, current_code_location)
                temps_to_edges[a.tmp].append(edge_tuple)

                if tmp in temp_register_symbols:
                    data_generated = temp_register_symbols[tmp]

            else:
                # write
                temp_defs[tmp] = current_code_location
                temp_variables[tmp] = pv

                # clear existing edges
                if tmp in temps_to_edges:
                    del temps_to_edges[tmp]

                for tmp_dep in a.tmp_deps:
                    if tmp_dep in temp_variables:
                        self._data_graph_add_edge(temp_variables[tmp_dep], pv)

                if data_generated:
                    temp_register_symbols[tmp] = data_generated

                # link every program variable read in this statement to the tmp being written
                for data in pv_read:
                    self._data_graph_add_edge(data, pv)

                if not a.tmp_deps and not pv_read:
                    # read in a constant
                    # try to parse out the constant from statement
                    const_variable = SimConstantVariable()
                    if statements is not None:
                        stmt = statements[a.stmt_idx]
                        if isinstance(stmt, pyvex.IRStmt.Dirty):
                            l.warning('Dirty statements are not supported in DDG for now.')
                        elif isinstance(stmt.data, pyvex.IRExpr.Const):
                            const_variable = SimConstantVariable(value=stmt.data.con.value)
                    const_pv = ProgramVariable(const_variable, current_code_location)
                    self._data_graph_add_edge(const_pv, pv)

        elif a.type == 'exit':
            # exits should only depend on tmps
            for tmp in a.tmp_deps:
                prev_code_loc = temp_defs[tmp]

                # add the edge to the graph
                self._stmt_graph_add_edge(prev_code_loc, current_code_location, type='exit', data='tmp')

                # log the edge
                edge_tuple = (prev_code_loc, current_code_location)
                temps_to_edges[tmp].append(edge_tuple)

        elif a.type == 'operation':
            # FIXME: we should support a more complete range of operations
            # Constant-fold sp/bp-relative arithmetic so stack variable offsets are tracked.
            if a.op.endswith('Sub32') or a.op.endswith('Sub64'):
                # subtract
                expr_0, expr_1 = a.exprs
                if expr_0.tmp_deps and (not expr_1.tmp_deps and not expr_1.reg_deps):
                    # tmp - const
                    tmp = list(expr_0.tmp_deps)[0]
                    if tmp in temp_register_symbols:
                        sort, offset = temp_register_symbols[tmp]
                        offset -= expr_1.ast.args[0]
                        data_generated = (sort, offset)

            elif a.op.endswith('Add32') or a.op.endswith('Add64'):
                # add
                expr_0, expr_1 = a.exprs
                if expr_0.tmp_deps and (not expr_1.tmp_deps and not expr_1.reg_deps):
                    # tmp + const
                    tmp = list(expr_0.tmp_deps)[0]
                    if tmp in temp_register_symbols:
                        sort, offset = temp_register_symbols[tmp]
                        offset += expr_1.ast.args[0]
                        data_generated = (sort, offset)

    #import pprint
    #pprint.pprint(self._data_graph.edges())
    #pprint.pprint(self.simplified_data_graph.edges())
    # import ipdb; ipdb.set_trace()

    return live_defs
def _def_lookup(self, live_defs, variable): # pylint:disable=no-self-use
"""
This is a backward lookup in the previous defs. Note that, as we are using VSA, it is possible that `variable`
is affected by several definitions.
:param LiveDefinitions live_defs: The collection of live definitions.
:param SimVariable: The variable to lookup for definitions.
:returns: A dict {stmt:labels} where label is the number of individual addresses of `addr_list` (or
the actual set of addresses depending on the keep_addrs flag) that are definted by stmt.
"""
prevdefs = {}
for code_loc in live_defs.lookup_defs(variable):
# Label edges with cardinality or actual sets of addresses
if isinstance(variable, SimMemoryVariable):
type_ = 'mem'
elif isinstance(variable, SimRegisterVariable):
type_ = 'reg'
else:
raise AngrDDGError('Unknown variable type %s' % type(variable))
prevdefs[code_loc] = {
'type': type_,
'data': variable
}
return prevdefs
def _kill(self, live_defs, variable, code_loc): # pylint:disable=no-self-use
"""
Kill previous defs. addr_list is a list of normalized addresses.
"""
# Case 1: address perfectly match, we kill
# Case 2: a is a subset of the original address
# Case 3: a is a superset of the original address
# the previous definition is killed. mark it in data graph.
if variable in live_defs:
for loc in live_defs.lookup_defs(variable):
pv = ProgramVariable(variable, loc)
self._data_graph_add_edge(pv, ProgramVariable(variable, code_loc), type='kill')
live_defs.kill_def(variable, code_loc)
def _get_register_size(self, reg_offset):
"""
Get the size of a register.
:param int reg_offset: Offset of the register.
:return: Size in bytes.
:rtype: int
"""
# TODO: support registers that are not aligned
if reg_offset in self.project.arch.register_names:
reg_name = self.project.arch.register_names[reg_offset]
reg_size = self.project.arch.registers[reg_name][1]
return reg_size
l.warning("_get_register_size(): unsupported register offset %d. Assum size 1. "
"More register name mappings should be implemented in archinfo.", reg_offset)
return 1
def _data_graph_add_node(self, node):
"""
Add a noe in the data dependence graph.
:param ProgramVariable node: The node to add.
:return: None
"""
self._data_graph.add_node(node)
self._simplified_data_graph = None
def _data_graph_add_edge(self, src, dst, **edge_labels):
"""
Add an edge in the data dependence graph.
:param ProgramVariable src: Source node.
:param ProgramVariable dst: Destination node.
:param edge_labels: All labels associated with the edge.
:return: None
"""
if src in self._data_graph and dst in self._data_graph[src]:
return
self._data_graph.add_edge(src, dst, **edge_labels)
self._simplified_data_graph = None
def _stmt_graph_add_edge(self, src, dst, **edge_labels):
"""
Add an edge in the statement dependence graph from a program location `src` to another program location `dst`.
:param CodeLocation src: Source node.
:param CodeLocation dst: Destination node.
:param edge_labels: All labels associated with the edge.
:returns: None
"""
# Is that edge already in the graph ?
# If at least one is new, then we are not redoing the same path again
if src in self._stmt_graph and dst in self._stmt_graph[src]:
return
self._stmt_graph.add_edge(src, dst, **edge_labels)
def _stmt_graph_annotate_edges(self, edges_to_annotate, **new_labels):
"""
Add new annotations to edges in the statement dependence graph.
:param list edges_to_annotate: A list of edges to annotate.
:param new_labels: New labels to be added to those edges.
:returns: None
"""
graph = self.graph
for src, dst in edges_to_annotate:
if src not in graph:
continue
if dst not in graph[src]:
continue
data = graph[src][dst]
for k, v in new_labels.iteritems():
if k in data:
data[k] = data[k] + (v,)
else:
# Construct a tuple
data[k] = (v,)
def _simplify_data_graph(self, data_graph):  # pylint:disable=no-self-use
    """
    Simplify a data graph by removing all temp variable nodes on the graph.

    Every node whose variable is a SimTemporaryVariable is removed; each of its
    (predecessor -> tmp) edges is fused with each (tmp -> successor) edge into a
    direct (predecessor -> successor) edge whose labels are the merge of both
    originals (labels on the outgoing edge win on conflict).

    :param networkx.DiGraph data_graph: The data dependence graph to simplify.
    :return: The simplified graph.
    :rtype: networkx.MultiDiGraph
    """

    graph = networkx.MultiDiGraph(data_graph)

    all_nodes = [ n for n in graph.nodes_iter() if isinstance(n.variable, SimTemporaryVariable) ]

    for tmp_node in all_nodes:
        # remove each tmp node by linking their successors and predecessors directly
        in_edges = graph.in_edges(tmp_node, data=True)
        out_edges = graph.out_edges(tmp_node, data=True)

        # detach the tmp node from its neighbors first
        for pred, _, _ in in_edges:
            graph.remove_edge(pred, tmp_node)
        for _, suc, _ in out_edges:
            graph.remove_edge(tmp_node, suc)

        # then bridge every predecessor to every successor
        for pred, _, data_in in in_edges:
            for _, suc, data_out in out_edges:
                # self-loops through the tmp node are dropped
                if pred is not tmp_node and suc is not tmp_node:
                    data = data_in.copy()
                    data.update(data_out)
                    graph.add_edge(pred, suc, **data)

        graph.remove_node(tmp_node)

    return graph
def _worklist_append(self, node_wrapper, worklist, worklist_set):
"""
Append a CFGNode and its successors into the work-list, and respect the call-depth limit
:param node_wrapper: The NodeWrapper instance to insert.
:param worklist: The work-list, which is a list.
:param worklist_set: A set of all CFGNodes that are inside the work-list, just for the sake of fast look-up.
It will be updated as well.
:returns: A set of newly-inserted CFGNodes (not NodeWrapper instances).
"""
if node_wrapper.cfg_node in worklist_set:
# It's already in the work-list
return
worklist.append(node_wrapper)
worklist_set.add(node_wrapper.cfg_node)
stack = [ node_wrapper ]
traversed_nodes = { node_wrapper.cfg_node }
inserted = { node_wrapper.cfg_node }
while stack:
nw = stack.pop()
n, call_depth = nw.cfg_node, nw.call_depth
# Get successors
edges = self._cfg.graph.out_edges(n, data=True)
for _, dst, data in edges:
if (dst not in traversed_nodes # which means we haven't touch this node in this appending procedure
and dst not in worklist_set): # which means this node is not in the work-list
# We see a new node!
traversed_nodes.add(dst)
if data['jumpkind'] == 'Ijk_Call':
if self._call_depth is None or call_depth < self._call_depth:
inserted.add(dst)
new_nw = DDGJob(dst, call_depth + 1)
worklist.append(new_nw)
worklist_set.add(dst)
stack.append(new_nw)
elif data['jumpkind'] == 'Ijk_Ret':
if call_depth > 0:
inserted.add(dst)
new_nw = DDGJob(dst, call_depth - 1)
worklist.append(new_nw)
worklist_set.add(dst)
stack.append(new_nw)
else:
new_nw = DDGJob(dst, call_depth)
inserted.add(dst)
worklist_set.add(dst)
worklist.append(new_nw)
stack.append(new_nw)
return inserted
def _build_function_dependency_graphs(self):
    """
    Build dependency graphs for each function, and save them in self._function_data_dependencies.

    Edges whose endpoints fall in different functions are added to both
    functions' graphs.
    """

    # This is a map between functions and its corresponding dependencies
    self._function_data_dependencies = defaultdict(networkx.DiGraph)

    # Group all dependencies first
    # Map basic-block addresses back to the function that owns them.
    simrun_addr_to_func = { }
    # .items() instead of the Python-2-only .iteritems() for py2/py3 compatibility.
    for _, func in self.kb.functions.items():
        for block in func.blocks:
            simrun_addr_to_func[block.addr] = func

    # .edges(data=True) instead of the networkx-1.x-only .edges_iter() -
    # valid on both networkx 1.x and 2.x.
    for src, dst, data in self.graph.edges(data=True):
        src_target_func = None
        if src.simrun_addr in simrun_addr_to_func:
            src_target_func = simrun_addr_to_func[src.simrun_addr]
            self._function_data_dependencies[src_target_func].add_edge(src, dst, **data)

        if dst.simrun_addr in simrun_addr_to_func:
            dst_target_func = simrun_addr_to_func[dst.simrun_addr]
            if dst_target_func is not src_target_func:
                self._function_data_dependencies[dst_target_func].add_edge(src, dst, **data)
# Register this analysis with angr's analysis registry under the name 'DDG'.
register_analysis(DDG, 'DDG')
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,470
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/path_group.py
|
import logging
import itertools
import ana
import simuvex
import claripy
import mulpyplexer
l = logging.getLogger('angr.path_group')
class PathGroup(ana.Storable):
    """
    Path groups are the future.

    Path groups allow you to wrangle multiple paths in a slick way. Paths are organized into "stashes", which you can
    step forward, filter, merge, and move around as you wish. This allows you to, for example, step two different
    stashes of paths at different rates, then merge them together.

    Stashes can be accessed as attributes (i.e. pg.active). A mulpyplexed stash can be retrieved by prepending the name
    with `mp_` (e.g., `pg.mp_active`).

    Note that you shouldn't usually be constructing path groups directly - there is a convenient shortcuts for
    creating path groups in ``Project.factory``: see :class:`angr.factory.AngrObjectFactory`.

    Multithreading your search can be useful in constraint-solving-intensive paths. Indeed, Python cannot multithread
    due to its GIL, but z3, written in C, can.

    The most important methods you should look at are ``step``, ``explore``, and ``use_tech``.
    """

    # Sentinel stash names: ALL selects every stash as a move source;
    # paths moved into DROP are discarded.
    ALL = '_ALL'
    DROP = '_DROP'
def __init__(self, project, active_paths=None, stashes=None, hierarchy=None, veritesting=None,
             veritesting_options=None, immutable=None, resilience=None, save_unconstrained=None,
             save_unsat=None, threads=None):
    """
    :param project:             A Project instance.
    :type  project:             angr.project.Project

    The following parameters are optional.

    :param active_paths:        Active paths to seed the "active" stash with.
    :param stashes:             A dictionary to use as the stash store.
    :param hierarchy:           A PathHierarchy object to use to track path reachability.
    :param veritesting:         If truthy, enables the Veritesting exploration technique.
    :param veritesting_options: A dict of keyword arguments for the Veritesting technique.
    :param immutable:           If True, all operations will return a new PathGroup. Otherwise (default), all
                                operations will modify the PathGroup (and return it, for consistency and
                                chaining).
    :param resilience:          If True, exceptions raised while stepping are caught and the offending path is
                                recorded as errored instead of the exception propagating.
    :param save_unconstrained:  If True, unconstrained successors are kept in the 'unconstrained' stash.
    :param save_unsat:          If True, unsatisfiable successors are kept in the 'unsat' stash.
    :param threads:             the number of worker threads to concurrently analyze states (useful in
                                z3-intensive paths).
    """
    self._project = project
    # A fresh PathHierarchy is created when none is supplied.
    self._hierarchy = PathHierarchy() if hierarchy is None else hierarchy
    self._immutable = False if immutable is None else immutable
    self._resilience = False if resilience is None else resilience

    # public options
    self.save_unconstrained = False if save_unconstrained is None else save_unconstrained
    self.save_unsat = False if save_unsat is None else save_unsat

    # techniques - hook lists that exploration techniques install themselves into
    self._hooks_step = []
    self._hooks_step_path = []
    self._hooks_filter = []
    self._hooks_complete = []

    if threads is not None:
        self.use_technique(exploration_techniques.Threading(threads))
    if veritesting:
        self.use_technique(exploration_techniques.Veritesting(
            **({} if veritesting_options is None else veritesting_options)
        ))

    # Default stash layout; a caller-provided `stashes` dict replaces it wholesale.
    self.stashes = {
        'active': [ ] if active_paths is None else active_paths,
        'stashed': [ ],
        'pruned': [ ],
        'unsat': [ ],
        'errored': [ ],
        'deadended': [ ],
        'unconstrained': [ ]
    } if stashes is None else stashes
#
# Pickling
#
def _ana_getstate(self):
self.prune()
s = dict(self.__dict__)
if self._hierarchy is not False:
s['_hierarchy'] = None
return s
def _ana_setstate(self, s):
self.__dict__.update(s)
if self._hierarchy is None:
self._hierarchy = PathHierarchy()
#
# Util functions
#
def copy(self, stashes=None):
    """
    Make a copy of this path group.

    :param stashes: If provided, the new group uses this stash dict instead of a
                    copy of ours.
    :returns: The new PathGroup.
    """
    if stashes is None:
        stashes = self._copy_stashes(immutable=True)
    clone = PathGroup(self._project, stashes=stashes, hierarchy=self._hierarchy, immutable=self._immutable, resilience=self._resilience, save_unconstrained=self.save_unconstrained, save_unsat=self.save_unsat)
    # Carry the installed techniques over, but copy the hook lists so that
    # later changes on either group don't affect the other.
    clone._hooks_step = list(self._hooks_step)
    clone._hooks_step_path = list(self._hooks_step_path)
    clone._hooks_filter = list(self._hooks_filter)
    clone._hooks_complete = list(self._hooks_complete)
    return clone
def _copy_stashes(self, immutable=None):
"""
Returns a copy of the stashes (if immutable) or the stashes themselves (if not immutable). Used to abstract away
immutability.
"""
if self._immutable if immutable is None else immutable:
return { k:list(v) for k,v in self.stashes.items() }
else:
return self.stashes
def _copy_paths(self, paths):
"""
Returns a copy of a list of paths (if immutable) or the paths themselves (if not immutable). Used to abstract
away immutability.
"""
if self._immutable:
return [ p.copy() for p in paths ]
else:
return paths
def _successor(self, new_stashes):
"""
Creates a new PathGroup with the provided stashes (if immutable), or sets the stashes (if not immutable). Used
to abstract away immutability.
:returns: A PathGroup.
"""
if self.DROP in new_stashes:
del new_stashes[self.DROP]
if not self._immutable:
self.stashes = new_stashes
return self
else:
return self.copy(stashes=new_stashes)
@staticmethod
def _filter_paths(filter_func, paths):
    """
    Filters a sequence of paths according to a filter_func.

    :param filter_func: The filter function. Should take a path as input and return a boolean.
    :param paths: A sequence of paths.
    :returns: A tuple, with the first element the matching paths and the second element the non-matching
              paths.
    """
    if filter_func is None:
        # No filter means everything matches.
        return paths, [] # does this condition actually matter

    l.debug("Filtering %d paths", len(paths))
    matched = [ ]
    unmatched = [ ]
    for candidate in paths:
        if filter_func(candidate):
            l.debug("... path %s matched!", candidate)
            matched.append(candidate)
        else:
            l.debug("... path %s didn't match!", candidate)
            unmatched.append(candidate)

    l.debug("... returning %d matches and %d non-matches", len(matched), len(unmatched))
    return matched, unmatched
def _one_path_step(self, a, check_func=None, successor_func=None, **kwargs):
    """
    Internal function to step a single path forward.

    :param a: The path.
    :param check_func: A function to check the path for an error state.
    :param successor_func: A function to run on the path instead of doing a.step().

    :returns: A tuple of lists: successors, unconstrained, unsat, pruned, errored.
    """
    # An installed step-path hook may take over stepping this path entirely.
    for hook in self._hooks_step_path:
        out = hook(a, **kwargs)
        if out is not None:
            return out

    if (check_func is not None and check_func(a)) or (check_func is None and a.errored):
        # This path has error(s)!
        if hasattr(a, "error") and isinstance(a.error, PathUnreachableError):
            # unreachable paths are pruned rather than errored
            return [], [], [], [a], []
        else:
            if self._hierarchy:
                self._hierarchy.unreachable_path(a)
                self._hierarchy.simplify()
            return [], [], [], [], [a]
    else:
        try:
            if successor_func is not None:
                successors = successor_func(a)
            else:
                successors = a.step(**kwargs)
            return successors, a.unconstrained_successors, a.unsat_successors, [], []
        except (AngrError, simuvex.SimError, claripy.ClaripyError):
            # With resilience on, a stepping failure turns the path into an
            # errored result instead of propagating the exception.
            if not self._resilience:
                raise
            else:
                l.warning("PathGroup resilience squashed an exception", exc_info=True)
                return [], [], [], [], [a]
def _record_step_results(self, new_stashes, new_active, a, successors, unconstrained, unsat, pruned, errored):
    """
    Take a whole bunch of intermediate values and smushes them together

    :param new_stashes:   The dict of stashes that will be modified by this operation
    :param new_active:    One of the lists in new_stashes that is being actively ticked
    :param a:             The path that just got ticked
    :param successors:    The successors of a
    :param unconstrained: The unconstrained successors
    :param unsat:         The unsatisfiable successors
    :param pruned:        The pruned successors
    :param errored:       The errored successors
    """
    if self._hierarchy:
        for p in successors:
            self._hierarchy.add_path(p)
        self._hierarchy.simplify()

    if len(self._hooks_filter) == 0:
        new_active.extend(successors)
    else:
        # Let each filter hook decide where a successor goes. The first hook
        # returning a stash name wins; a hook may also return a
        # (stash_name, replacement_path) tuple to substitute the path.
        for path in successors:
            for hook in self._hooks_filter:
                goto = hook(path)
                if goto is None:
                    continue
                if type(goto) is tuple:
                    goto, path = goto

                if goto in new_stashes:
                    new_stashes[goto].append(path)
                    break
                else:
                    new_stashes[goto] = [path]
                    break
            else:
                # for/else: no hook claimed this successor, so it stays active.
                new_active.append(path)

    if self.save_unconstrained:
        new_stashes['unconstrained'] += unconstrained
    if self.save_unsat:
        new_stashes['unsat'] += unsat
    new_stashes['pruned'] += pruned
    new_stashes['errored'] += errored

    # A path that produced no successors (and wasn't pruned/errored) is dead-ended.
    if a not in pruned and a not in errored and len(successors) == 0:
        new_stashes['deadended'].append(a)
def _one_step(self, stash, selector_func=None, successor_func=None, check_func=None, **kwargs):
"""
Takes a single step in a given stash.
:param stash: The name of the stash.
:param successor_func: If provided, this function is called with the path as its only argument. It should
return the path's successors. If this is None, path.successors is used, instead.
:param selector_func: If provided, should be a lambda that takes a Path and returns a boolean. If True, the
path will be stepped. Otherwise, it will be kept as-is.
:param check_func: If provided, this function will be called to decide whether the current path is errored
or not. Path.errored will not be called anymore.
:returns: The successor PathGroup.
:rtype: PathGroup
"""
if len(self._hooks_step) != 0:
# hooking step is a bit of an ordeal, how are you supposed to compose stepping operations?
# the answer is that you nest them - any stepping hook must eventually call step itself,
# at which point it calls the next hook, and so on, until we fall through to the
# basic stepping operation.
hook = self._hooks_step.pop()
pg = self.copy() if self._immutable else self
pg._immutable = False # this is a performance consideration
out = hook(pg, stash, selector_func=selector_func, successor_func=successor_func, check_func=check_func, **kwargs)
out._immutable = self._immutable
self._hooks_step.append(hook)
if out is not self:
out._hooks_step.append(hook)
return out
new_stashes = self._copy_stashes()
to_tick = list(self.stashes[stash])
if selector_func is None:
new_active = []
to_tick = list(self.stashes[stash])
else:
new_active = []
to_tick = []
for a in self.stashes[stash]:
if selector_func(a):
to_tick.append(a)
else:
new_active.append(a)
for a in to_tick:
r = self._one_path_step(a, successor_func=successor_func, check_func=check_func, **kwargs)
self._record_step_results(new_stashes, new_active, a, *r)
new_stashes[stash] = new_active
return self._successor(new_stashes)
@staticmethod
def _move(stashes, filter_func, src, dst):
    """
    Moves all stashes that match the filter_func from src to dst.

    :returns: A new stashes dictionary.
    """
    if dst == PathGroup.ALL:
        raise AngrPathGroupError("Can't handle '_ALL' as a target stash.")
    if src == PathGroup.DROP:
        raise AngrPathGroupError("Can't handle '_DROP' as a source stash.")

    # '_ALL' as a source means: pull from every stash except the destination.
    if src == PathGroup.ALL:
        sources = [name for name in stashes.keys() if name != dst]
    else:
        sources = [src]

    moved = [ ]
    for name in sources:
        picked, kept = PathGroup._filter_paths(filter_func, stashes[name])
        stashes[name] = kept
        moved.extend(picked)

    # Moving into '_DROP' just discards the matched paths.
    if dst != PathGroup.DROP:
        stashes.setdefault(dst, [ ])
        stashes[dst].extend(moved)
    return stashes
def __repr__(self):
s = "<PathGroup with "
s += ', '.join(("%d %s" % (len(v),k)) for k,v in self.stashes.items() if len(v) != 0)
s += ">"
return s
def mulpyplex(self, *stashes):
    """
    Mulpyplex across several stashes.

    :param stashes: the stashes to mulpyplex
    :return: a mulpyplexed list of paths from the stashes in question, in the specified order
    """
    combined = []
    for name in stashes:
        combined.extend(self.stashes[name])
    return mulpyplexer.MP(combined)
def __getattr__(self, k):
    """
    Expose stashes as attributes: ``pg.active`` (the stash), ``pg.one_active``
    (its first path), ``pg.mp_active`` (mulpyplexed), and ``pg._ALL`` /
    ``pg.mp__ALL`` (every path from every stash).
    """
    if k == PathGroup.ALL:
        return list(itertools.chain.from_iterable(self.stashes.values()))
    if k == 'mp_' + PathGroup.ALL:
        return mulpyplexer.MP(list(itertools.chain.from_iterable(self.stashes.values())))
    if k.startswith('mp_'):
        return mulpyplexer.MP(self.stashes[k[3:]])
    if k.startswith('one_') and k[4:] in self.stashes:
        return self.stashes[k[4:]][0]
    if k in self.stashes:
        return self.stashes[k]
    raise AttributeError(k)
def __dir__(self):
    """
    Include the dynamic stash attributes (and their mp_/one_ variants) in dir().
    """
    # Explicit list() conversions: on Python 3, dict views cannot be
    # concatenated with `+`, while list() is valid on both Python 2 and 3.
    return sorted(set(
        list(self.__dict__.keys()) +
        dir(super(PathGroup, self)) +
        dir(type(self)) +
        list(self.stashes.keys()) +
        ['mp_'+k for k in self.stashes.keys()] +
        ['one_'+k for k in self.stashes.keys()]
    ))
#
# Interface
#
def apply(self, path_func=None, stash_func=None, stash=None):
    """
    Applies a given function to a given stash.

    :param path_func:  A function to apply to every path. Should take a path and return a path. The returned path
                       will take the place of the old path. If the function *doesn't* return a path, the old
                       path will be used. If the function returns a list of paths, they will replace the original
                       paths.
    :param stash_func: A function to apply to the whole stash. Should take a list of paths and return a list of
                       paths. The resulting list will replace the stash.

                       If both path_func and stash_func are provided path_func is applied first, then stash_func
                       is applied on the results.

    :returns: The resulting PathGroup.
    :rtype: PathGroup
    """
    stash = 'active' if stash is None else stash
    new_stashes = self._copy_stashes()
    paths = new_stashes[stash]

    if path_func is not None:
        transformed = [ ]
        for p in paths:
            result = path_func(p)
            if isinstance(result, Path):
                transformed.append(result)
            elif isinstance(result, (list, tuple, set)):
                # A collection of paths replaces the original path.
                transformed.extend(result)
            else:
                # Any other return value (e.g. None) keeps the original path.
                transformed.append(p)
        paths = transformed

    if stash_func is not None:
        paths = stash_func(paths)

    new_stashes[stash] = paths
    return self._successor(new_stashes)
def split(self, stash_splitter=None, stash_ranker=None, path_ranker=None, limit=None, from_stash=None, to_stash=None):
"""
Split a stash of paths. The stash from_stash will be split into two stashes depending on the other options
passed in. If to_stash is provided, the second stash will be written there.
stash_splitter overrides stash_ranker, which in turn overrides path_ranker. If no functions are provided, the
paths are simply split according to the limit.
The sort done with path_ranker is ascending.
:param stash_splitter: A function that should take a list of paths and return a tuple of two lists (the two
resulting stashes).
:param stash_ranker: A function that should take a list of paths and return a sorted list of paths. This list
will then be split according to "limit".
:param path_ranker: An alternative to stash_splitter. Paths will be sorted with outputs of this function.
used as a key. The first "limit" of them will be kept, the rest split off.
:param limit: For use with path_ranker. The number of paths to keep. Default: 8
:param from_stash: The stash to split (default: 'active')
:param to_stash: The stash to write to (default: 'stashed')
:returns: The resulting PathGroup.
:rtype: PathGroup
"""
limit = 8 if limit is None else limit
from_stash = 'active' if from_stash is None else from_stash
to_stash = 'stashed' if to_stash is None else to_stash
new_stashes = self._copy_stashes()
old_paths = new_stashes[from_stash]
if stash_splitter is not None:
keep, split = stash_splitter(old_paths)
elif stash_ranker is not None:
ranked_paths = stash_ranker(old_paths)
keep, split = ranked_paths[:limit], ranked_paths[limit:]
elif path_ranker is not None:
ranked_paths = sorted(old_paths, key=path_ranker)
keep, split = ranked_paths[:limit], ranked_paths[limit:]
else:
keep, split = old_paths[:limit], old_paths[limit:]
new_stashes[from_stash] = keep
new_stashes[to_stash] = split if to_stash not in new_stashes else new_stashes[to_stash] + split
return self._successor(new_stashes)
def step(self, n=None, selector_func=None, step_func=None, stash=None,
successor_func=None, until=None, check_func=None, **kwargs):
"""
Step a stash of paths forward, i.e. run :meth:`angr.path.Path.step` on each of the individual paths in a stash
and categorize the successors appropriately.
The parameters to this function allow you to control everything about the stepping and categorization process.
:param stash: The name of the stash to step (default: 'active')
:param n: The number of times to step (default: 1 if "until" is not provided)
:param selector_func: If provided, should be a function that takes a Path and returns a boolean. If True, the
path will be stepped. Otherwise, it will be kept as-is.
:param step_func: If provided, should be a function that takes a PathGroup and returns a PathGroup. Will
be called with the PathGroup at every step. Note that this function should not actually
perform any stepping - it is meant to be a maintenance function called after each step.
:param successor_func: If provided, should be a function that takes a path and return its successors.
Otherwise, Path.successors will be used.
:param until: If provided, should be a function that takes a PathGroup and returns True or False.
Stepping will terminate when it is True.
:param check_func: If provided, this function will be called to decide whether the current path is errored
or not. Otherwise, Path.errored will be used.
Additionally, you can pass in any of the following keyword args for project.factory.sim_run:
:param jumpkind: The jumpkind of the previous exit
:param addr: An address to execute at instead of the state's ip.
:param stmt_whitelist: A list of stmt indexes to which to confine execution.
:param last_stmt: A statement index at which to stop execution.
:param thumb: Whether the block should be lifted in ARM's THUMB mode.
:param backup_state: A state to read bytes from instead of using project memory.
:param opt_level: The VEX optimization level to use.
:param insn_bytes: A string of bytes to use for the block instead of the project.
:param max_size: The maximum size of the block, in bytes.
:param num_inst: The maximum number of instructions.
:param traceflags: traceflags to be passed to VEX. Default: 0
:returns: The resulting PathGroup.
:rtype: PathGroup
"""
stash = 'active' if stash is None else stash
n = n if n is not None else 1 if until is None else 100000
pg = self
for i in range(n):
l.debug("Round %d: stepping %s", i, pg)
pg = pg._one_step(stash=stash, selector_func=selector_func, successor_func=successor_func, check_func=check_func, **kwargs)
if step_func is not None:
pg = step_func(pg)
if len(pg.stashes[stash]) == 0:
l.debug("Out of paths in stash %s", stash)
break
if until is not None and until(pg):
l.debug("Until function returned true")
break
return pg
def prune(self, filter_func=None, from_stash=None, to_stash=None):
"""
Prune unsatisfiable paths from a stash.
This function will move all unsatisfiable or errored paths in the given stash into a different stash.
:param filter_func: Only prune paths that match this filter.
:param from_stash: Prune paths from this stash. (default: 'active')
:param to_stash: Put pruned paths in this stash. (default: 'pruned')
:returns: The resulting PathGroup.
:rtype: PathGroup
"""
to_stash = 'pruned' if to_stash is None else to_stash
from_stash = 'active' if from_stash is None else from_stash
to_prune, new_active = self._filter_paths(filter_func, self.stashes[from_stash])
new_stashes = self._copy_stashes()
for p in to_prune:
if p.errored or not p.state.satisfiable():
if to_stash not in new_stashes:
new_stashes[to_stash] = [ ]
new_stashes[to_stash].append(p)
if self._hierarchy:
self._hierarchy.unreachable_path(p)
self._hierarchy.simplify()
else:
new_active.append(p)
new_stashes[from_stash] = new_active
return self._successor(new_stashes)
def move(self, from_stash, to_stash, filter_func=None):
"""
Move paths from one stash to another.
:param from_stash: Take matching paths from this stash.
:param to_stash: Put matching paths into this stash.
:param filter_func: Stash paths that match this filter. Should be a function that takes a path and returns
True or False. Default: stash all paths
:returns: The resulting PathGroup.
:rtype: PathGroup
"""
new_stashes = self._copy_stashes()
self._move(new_stashes, filter_func, from_stash, to_stash)
return self._successor(new_stashes)
def stash(self, filter_func=None, from_stash=None, to_stash=None):
"""
Stash some paths. This is an alias for move(), with defaults for the stashes.
:param filter_func: Stash paths that match this filter. Should be a function. that takes a path and returns True
or False. (default: stash all paths)
:param from_stash: Take matching paths from this stash. (default: 'active')
:param to_stash: Put matching paths into this stash. (default: 'stashed')
:returns: The resulting PathGroup
:rtype: PathGroup
"""
to_stash = 'stashed' if to_stash is None else to_stash
from_stash = 'active' if from_stash is None else from_stash
return self.move(from_stash, to_stash, filter_func=filter_func)
def drop(self, filter_func=None, stash=None):
"""
Drops paths from a stash. This is an alias for move(), with defaults for the stashes.
:param filter_func: Drop paths that match this filter. Should be a function that takes a path and returns True
or False. (default: drop all paths)
:param stash: Drop matching paths from this stash. (default: 'active')
:returns: The resulting PathGroup
:rtype: PathGroup
"""
stash = 'active' if stash is None else stash
return self.move(stash, self.DROP, filter_func=filter_func)
def unstash(self, filter_func=None, to_stash=None, from_stash=None):
"""
Unstash some paths. This is an alias for move(), with defaults for the stashes.
:param filter_func: Unstash paths that match this filter. Should be a function that takes a path and returns
True or False. (default: unstash all paths)
:param from_stash: take matching paths from this stash. (default: 'stashed')
:param to_stash: put matching paths into this stash. (default: 'active')
:returns: The resulting PathGroup.
:rtype: PathGroup
"""
to_stash = 'active' if to_stash is None else to_stash
from_stash = 'stashed' if from_stash is None else from_stash
return self.move(from_stash, to_stash, filter_func=filter_func)
def _merge_paths(self, paths):
"""
Merges a list of paths.
:param paths: the paths to merge
:returns: the resulting path
:rtype: Path
"""
if self._hierarchy:
optimal, common_history, others = self._hierarchy.most_mergeable(paths)
else:
optimal, common_history, others = paths, None, [ ]
if len(optimal) < 2:
raise AngrPathGroupError("unable to find merge candidates")
o = optimal.pop()
m = o.merge(*optimal, common_history=common_history)
if self._hierarchy:
self._hierarchy.add_path(m)
if len(others):
others.append(m)
return self._merge_paths(others)
else:
return m
def merge(self, merge_func=None, stash=None):
"""
Merge the states in a given stash.
:param stash: The stash (default: 'active')
:param merge_func: If provided, instead of using path.merge, call this function with the paths as the argument.
Should return the merged path.
:returns: The result PathGroup.
:rtype: PathGroup
"""
stash = 'active' if stash is None else stash
to_merge = self.stashes[stash]
not_to_merge = [ ]
merge_groups = [ ]
while len(to_merge) > 0:
g, to_merge = self._filter_paths(lambda p: p.addr == to_merge[0].addr, to_merge)
if len(g) <= 1:
not_to_merge.extend(g)
else:
merge_groups.append(g)
for g in merge_groups:
try:
m = self._merge_paths(g) if merge_func is None else merge_func(*g)
not_to_merge.append(m)
except simuvex.SimMergeError:
l.warning("SimMergeError while merging %d paths", len(g), exc_info=True)
not_to_merge.extend(g)
new_stashes = self._copy_stashes()
new_stashes[stash] = not_to_merge
return self._successor(new_stashes)
def use_technique(self, tech):
"""
Use an exploration technique with this path group.
Techniques can be found in :mod:`angr.exploration_techniques`.
:param tech: An ExplorationTechnique object that contains code to modify this path group's behavior
"""
# this might be the best worst code I've ever written in my life
tech.project = self._project
self.remove_tech(tech)
tech.setup(self)
for hook in ['step_path', 'step', 'filter', 'complete']:
hookfunc = getattr(tech, hook)
if hookfunc.im_func is not getattr(exploration_techniques.ExplorationTechnique, hook).im_func:
getattr(self, '_hooks_' + hook).append(hookfunc)
def remove_tech(self, tech):
for hook in ['step_path', 'step', 'filter', 'complete']:
try:
getattr(self, '_hooks_' + hook).remove(getattr(tech, hook))
except ValueError:
pass # it'll error if it wasn't hooked but we don't care
#
# Various canned functionality
#
def stash_not_addr(self, addr, from_stash=None, to_stash=None):
"""
Stash all paths not at address addr from stash from_stash to stash to_stash.
"""
return self.stash(lambda p: p.addr != addr, from_stash=from_stash, to_stash=to_stash)
def stash_addr(self, addr, from_stash=None, to_stash=None):
"""
Stash all paths at address addr from stash from_stash to stash to_stash.
"""
return self.stash(lambda p: p.addr == addr, from_stash=from_stash, to_stash=to_stash)
def stash_addr_past(self, addr, from_stash=None, to_stash=None):
"""
Stash all paths containg address addr in their backtrace from stash from_stash to stash to_stash.
"""
return self.stash(lambda p: addr in p.addr_trace, from_stash=from_stash, to_stash=to_stash)
def stash_not_addr_past(self, addr, from_stash=None, to_stash=None):
"""
Stash all paths not containg address addr in their backtrace from stash from_stash to stash to_stash.
"""
return self.stash(lambda p: addr not in p.addr_trace, from_stash=from_stash, to_stash=to_stash)
def stash_all(self, from_stash=None, to_stash=None):
"""
Stash all paths from stash from_stash to stash to_stash.
"""
return self.stash(lambda p: True, from_stash=from_stash, to_stash=to_stash)
def unstash_addr(self, addr, from_stash=None, to_stash=None):
"""
Unstash all paths at address addr.
"""
return self.unstash(lambda p: p.addr == addr, from_stash=from_stash, to_stash=to_stash)
def unstash_addr_past(self, addr, from_stash=None, to_stash=None):
"""
Unstash all paths containing address addr in their backtrace.
"""
return self.unstash(lambda p: addr in p.addr_trace, from_stash=from_stash, to_stash=to_stash)
def unstash_not_addr(self, addr, from_stash=None, to_stash=None):
"""
Unstash all paths not at address addr.
"""
return self.unstash(lambda p: p.addr != addr, from_stash=from_stash, to_stash=to_stash)
def unstash_not_addr_past(self, addr, from_stash=None, to_stash=None):
"""
Unstash all paths not containing address addr in their backtrace.
"""
return self.unstash(lambda p: addr not in p.addr_trace, from_stash=from_stash, to_stash=to_stash)
def unstash_all(self, from_stash=None, to_stash=None):
"""
Unstash all paths.
"""
return self.unstash(lambda p: True, from_stash=from_stash, to_stash=to_stash)
#
# High-level functionality
#
def explore(self, stash=None, n=None, find=None, avoid=None, find_stash='found', avoid_stash='avoid', cfg=None, num_find=1, step_func=None):
"""
Tick stash "stash" forward (up to "n" times or until "num_find" paths are found), looking for condition "find",
avoiding condition "avoid". Stashes found paths into "found_stash' and avoided paths into "avoid_stash".
The "find" and "avoid" parameters may be any of:
- An address to find
- A set or list of addresses to find
- A function that takes a path and returns whether or not it matches.
If an angr CFG is passed in as the "cfg" parameter and "find" is either a number or a list or a set, then
any paths which cannot possibly reach a success state without going through a failure state will be
preemptively avoided.
"""
num_find += len(self.stashes[find_stash]) if find_stash in self.stashes else 0
tech = exploration_techniques.Explorer(find=find,
avoid=avoid,
find_stash=find_stash,
avoid_stash=avoid_stash,
cfg=cfg,
num_find=num_find)
self.use_technique(tech)
out = self.run(stash=stash,
step_func=step_func,
n=n)
out.remove_tech(tech)
self.remove_tech(tech)
return out
def run(self, stash=None, n=None, step_func=None):
"""
Run until the path group has reached a completed state, according to
the current exploration techniques.
TODO: step_func doesn't work with veritesting, since veritesting replaces
the default step logic.
:param stash: Operate on this stash
:param n: Step at most this many times
:param step_func: If provided, should be a function that takes a PathGroup and returns a new PathGroup. Will
be called with the current PathGroup at every step.
:return: The resulting PathGroup.
:rtype: PathGroup
"""
if len(self._hooks_complete) == 0 and n is None:
l.warn("No completion state defined for path group; stepping until all paths deadend")
until_func = lambda pg: any(h(pg) for h in self._hooks_complete)
return self.step(n=n, step_func=step_func, until=until_func, stash=stash)
from .path_hierarchy import PathHierarchy
from .errors import PathUnreachableError, AngrError, AngrPathGroupError
from .path import Path
from . import exploration_techniques
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,471
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/analyses/dfg.py
|
import logging
from ..analysis import Analysis, register_analysis
from networkx import DiGraph
from copy import copy
l = logging.getLogger(name="angr.analyses.dfg")
class DFG(Analysis):
    """
    Build a Data Flow Graph (DFG) for every basic block of a CFG.

    The DFGs are available in the dict ``self.dfgs``, keyed by basic block
    address, with a :class:`networkx.DiGraph` as the value.
    """
    def __init__(self, cfg=None, annocfg=None):
        """
        :param cfg:     A CFG used to get all the basic blocks. If None, a
                        CFGAccurate analysis is run on the project.
        :param annocfg: An AnnotatedCFG built from a backward slice, used to
                        only build the DFG on the whitelisted statements.
        """
        if cfg is None:
            self._cfg = self.project.analyses.CFGAccurate()
        else:
            self._cfg = cfg
        self._annocfg = annocfg
        self.dfgs = self._construct()

    def _need_to_ignore(self, addr, stmt, stmt_idx):
        """
        Decide whether a VEX statement should be excluded from the DFG.

        Excludes statements outside the annotated CFG's whitelist (when one
        is provided), IMark/AbiHint/Exit statements, and writes to the
        instruction pointer register.
        """
        if self._annocfg is not None:
            whitelist = self._annocfg.get_whitelisted_statements(addr)
            # A whitelist of False means "nothing in this block is allowed";
            # a non-None whitelist restricts to the listed statement indices.
            if whitelist is False or (whitelist is not None and stmt_idx not in whitelist):
                return True
        if stmt.tag == 'Ist_IMark' or stmt.tag == 'Ist_AbiHint' or stmt.tag == 'Ist_Exit':
            return True
        elif stmt.tag == 'Ist_Put':
            arch = self.project.arch
            if stmt.offset in arch.register_names:
                if stmt.offset == arch.ip_offset:
                    return True
        return False

    def _construct(self):
        """
        We want to build the type of DFG that's used in "Automated Ident. of Crypto
        Primitives in Binary Code with Data Flow Graph Isomorphisms." Unlike that
        paper, however, we're building it on Vex IR instead of assembly instructions.

        :returns: A dict mapping basic block address to its DiGraph DFG.
        """
        cfg = self._cfg
        p = self.project
        dfgs = {}
        l.debug("Building Vex DFG...")
        for node in cfg.nodes():
            try:
                # `is None` instead of `== None` for the identity check.
                if node.simprocedure_name is None:
                    irsb = p.factory.block(node.addr).vex
                else:
                    # Lazy %-style logging args instead of eager formatting.
                    l.debug("Cannot process SimProcedures, ignoring %s", node.simprocedure_name)
                    continue
            except Exception as e:
                # Best-effort: skip blocks that fail to lift.
                l.debug(e)
                continue
            tmpsnodes = {}    # tmp index -> node that last defined the tmp
            storesnodes = {}
            putsnodes = {}    # register offset -> node that last wrote it
            statements = irsb.statements
            dfg = DiGraph()
            for stmt_idx, stmt in enumerate(statements):
                # We want to skip over certain types, such as Imarks
                if self._need_to_ignore(node.addr, stmt, stmt_idx):
                    continue
                # break statement down into sub-expressions
                exprs = stmt.expressions
                stmt_node = stmt
                dfg.add_node(stmt)
                if stmt.tag == 'Ist_WrTmp':
                    tmpsnodes[stmt.tmp] = stmt_node
                    if exprs[0].tag == 'Iex_Binop':
                        if exprs[1].tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                        else:
                            dfg.add_edge(exprs[1], stmt_node)
                        if exprs[2].tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[exprs[2].tmp], stmt_node)
                        else:
                            dfg.add_edge(exprs[2], stmt_node)
                    elif exprs[0].tag == 'Iex_Unop':
                        # Unops are collapsed: the destination tmp aliases its
                        # source instead of getting its own node.
                        dfg.remove_node(stmt_node)
                        if exprs[1].tag == 'Iex_RdTmp':
                            tmpsnodes[stmt.tmp] = copy(tmpsnodes[exprs[1].tmp])
                            tmpsnodes[stmt.tmp].tmp = stmt.tmp
                        else:
                            tmpsnodes[stmt.tmp] = exprs[1]
                    elif exprs[0].tag == 'Iex_RdTmp':
                        tmpsnodes[stmt.tmp] = copy(tmpsnodes[exprs[0].tmp])
                        tmpsnodes[stmt.tmp].tmp = stmt.tmp
                    elif exprs[0].tag == 'Iex_Get':
                        # `in` instead of the Python-2-only dict.has_key().
                        if exprs[0].offset in putsnodes:
                            dfg.add_edge(putsnodes[exprs[0].offset], stmt_node)
                        if len(exprs) > 1 and exprs[1].tag == "Iex_RdTmp":
                            dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                        elif len(exprs) > 1:
                            dfg.add_edge(exprs[1], stmt_node)
                    elif exprs[0].tag == 'Iex_Load':
                        if exprs[1].tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                        else:
                            dfg.add_edge(exprs[1], stmt_node)
                    else:
                        # Take a guess by assuming exprs[0] is the op and any other expressions are args
                        for e in exprs[1:]:
                            if e.tag == 'Iex_RdTmp':
                                dfg.add_edge(tmpsnodes[e.tmp], stmt_node)
                            else:
                                dfg.add_edge(e, stmt_node)
                elif stmt.tag == 'Ist_Store':
                    if exprs[0].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[0].tmp], stmt_node)
                    elif exprs[0].tag == 'Iex_Const':
                        dfg.add_edge(exprs[0], stmt_node)
                    if exprs[1].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[1].tmp], stmt_node)
                    else:
                        dfg.add_edge(exprs[1], stmt_node)
                elif stmt.tag == 'Ist_Put':
                    if exprs[0].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[0].tmp], stmt_node)
                    elif exprs[0].tag == 'Iex_Const':
                        dfg.add_edge(exprs[0], stmt_node)
                    putsnodes[stmt.offset] = stmt_node
                elif stmt.tag == 'Ist_Exit':
                    if exprs[0].tag == 'Iex_RdTmp':
                        dfg.add_edge(tmpsnodes[exprs[0].tmp], stmt_node)
                elif stmt.tag == 'Ist_Dirty':
                    tmpsnodes[stmt.tmp] = stmt_node
                elif stmt.tag == 'Ist_CAS':
                    tmpsnodes[stmt.oldLo] = stmt_node
                else:
                    for e in stmt.expressions:
                        if e.tag == 'Iex_RdTmp':
                            dfg.add_edge(tmpsnodes[e.tmp], stmt_node)
                        else:
                            dfg.add_edge(e, stmt_node)
            # Drop isolated nodes. Collect them first: mutating a graph while
            # iterating its node view raises a RuntimeError on networkx >= 2.
            isolated = [vtx for vtx in dfg.nodes() if dfg.degree(vtx) == 0]
            for vtx in isolated:
                dfg.remove_node(vtx)
            if dfg.size() > 0:
                dfgs[node.addr] = dfg
        return dfgs
register_analysis(DFG, 'DFG')
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,472
|
MayfeelYang/angr
|
refs/heads/master
|
/angr/lifter.py
|
import sys
import logging
from cachetools import LRUCache
import pyvex
import simuvex
from archinfo import ArchARM
l = logging.getLogger("angr.lifter")
VEX_IRSB_MAX_SIZE = 400  # default upper bound on a lifted block's size, in bytes
VEX_IRSB_MAX_INST = 99  # default upper bound on instructions per lifted block
VEX_DEFAULT_OPT_LEVEL = 1  # default VEX IR optimization level
class Lifter(object):
"""
The lifter is the part of the factory that deals with the logic related to lifting blocks to IR.
It is complicated enough that it gets its own class!
Usually, the only way you'll ever have to interact with this class is that its `lift` method has
been transplanted into the factory as `project.factory.block`.
"""
LRUCACHE_SIZE = 10000
def __init__(self, project=None, arch=None, cache=False):
if project:
self._arch = project.arch
elif arch:
self._arch = arch
else:
self._arch = None
self._project = project
self._thumbable = isinstance(self._arch, ArchARM) if self._arch is not None else False
self._cache_enabled = cache
self._block_cache = LRUCache(maxsize=self.LRUCACHE_SIZE)
self._cache_hit_count = 0
self._cache_miss_count = 0
    def clear_cache(self):
        """Drop all cached blocks and reset the cache hit/miss counters."""
        self._block_cache = LRUCache(maxsize=self.LRUCACHE_SIZE)
        self._cache_hit_count = 0
        self._cache_miss_count = 0
def _normalize_options(self, addr, arch, thumb):
"""
Given a subset of the arguments to lift or fresh_block, perform all the sanity checks
and normalize the form of the args
"""
if arch is None:
if self._arch is None:
raise AngrLifterError('"arch" must be specified')
thumbable = self._thumbable
arch = self._arch
else:
thumbable = isinstance(arch, ArchARM)
if thumbable and addr % 2 == 1:
thumb = True
elif not thumbable and thumb:
l.warning("Why did you pass in thumb=True on a non-ARM architecture")
thumb = False
if thumb:
addr &= ~1
return addr, arch, thumb
    def fresh_block(self, addr, size, arch=None, insn_bytes=None, thumb=False):
        """
        Returns a Block object with the specified size. No lifting will be performed.
        :param int addr: Address at which to start the block.
        :param int size: Size of the block.
        :param arch: Architecture to use; defaults to the lifter's own arch.
        :param insn_bytes: Bytes to use for the block instead of project memory.
        :param bool thumb: Whether the block is in ARM THUMB mode.
        :return: A Block instance.
        :rtype: Block
        :raises AngrLifterError: If insn_bytes is omitted and the lifter has
                                 no associated project to read memory from.
        """
        addr, arch, thumb = self._normalize_options(addr, arch, thumb)
        if self._cache_enabled:
            # Probe the cache at both opt levels, since a fresh (unlifted)
            # block carries no opt level of its own.
            for opt_level in (0, 1):
                cache_key = (addr, insn_bytes, size, None, thumb, opt_level)
                if cache_key in self._block_cache:
                    return self._block_cache[cache_key]
        if insn_bytes is None:
            if self._project is None:
                raise AngrLifterError("Lifter does not have an associated angr Project. "
                                      "You must specify \"insn_bytes\".")
            insn_bytes, size = self._load_bytes(addr, size, None)
        if thumb:
            # Re-set the THUMB bit stripped by _normalize_options.
            addr += 1
        b = Block(insn_bytes, arch=arch, addr=addr, size=size, thumb=thumb)
        if self._cache_enabled:
            # NOTE(review): cache_key here is whatever the probe loop left it
            # as (opt_level == 1), so the block is stored under only that key;
            # presumably intentional since fresh blocks are opt-level-agnostic,
            # but confirm.
            self._block_cache[cache_key] = b
        return b
def lift(self, addr, arch=None, insn_bytes=None, max_size=None, num_inst=None,
         traceflags=0, thumb=False, backup_state=None, opt_level=None):
    """
    Returns a pyvex block starting at address `addr`.

    :param addr: The address at which to start the block.

    The following parameters are optional:

    :param thumb: Whether the block should be lifted in ARM's THUMB mode.
    :param backup_state: A state to read bytes from instead of using project memory.
    :param opt_level: The VEX optimization level to use.
    :param insn_bytes: A string of bytes to use for the block instead of the project.
    :param max_size: The maximum size of the block, in bytes.
    :param num_inst: The maximum number of instructions.
    :param traceflags: traceflags to be passed to VEX. (default: 0)
    :return: A Block wrapping the lifted pyvex.IRSB.
    :raises AngrLifterError: if no arch can be determined or no project/bytes are available.
    :raises AngrMemoryError: if no bytes exist at `addr`.
    :raises AngrTranslationError: if VEX fails to translate the bytes.
    """
    # Remember whether the caller constrained size / instruction count before
    # substituting the VEX defaults, since the pyvex call differs per case.
    passed_max_size = max_size is not None
    passed_num_inst = num_inst is not None
    max_size = VEX_IRSB_MAX_SIZE if max_size is None else max_size
    num_inst = VEX_IRSB_MAX_INST if num_inst is None else num_inst
    opt_level = VEX_DEFAULT_OPT_LEVEL if opt_level is None else opt_level
    addr, arch, thumb = self._normalize_options(addr, arch, thumb)
    cache_key = (addr, insn_bytes, max_size, num_inst, thumb, opt_level)
    # Only reuse a cached Block whose VEX IR has actually been built.
    if self._cache_enabled and cache_key in self._block_cache and self._block_cache[cache_key].vex is not None:
        self._cache_hit_count += 1
        return self._block_cache[cache_key]
    else:
        self._cache_miss_count += 1
    if insn_bytes is not None:
        buff, size = insn_bytes, len(insn_bytes)
        # Caller-provided bytes bound the block size exactly.
        passed_max_size = True
    else:
        if self._project is None:
            raise AngrLifterError("Lifter does not have an associated angr Project. "
                                  "You must specify \"insn_bytes\".")
        buff, size = self._load_bytes(addr, max_size, state=backup_state)
        if not buff or size == 0:
            raise AngrMemoryError("No bytes in memory for block starting at %#x." % addr)
    # deal with thumb mode in ARM, sending an odd address and an offset
    # into the string
    byte_offset = 0
    real_addr = addr
    if thumb:
        byte_offset = 1
        addr += 1
    l.debug("Creating pyvex.IRSB of arch %s at %#x", arch.name, addr)
    pyvex.set_iropt_level(opt_level)
    try:
        # Four combinations of (max_size, num_inst) limits map onto four
        # distinct pyvex.IRSB argument sets.
        if passed_max_size and not passed_num_inst:
            irsb = pyvex.IRSB(buff, addr, arch,
                              num_bytes=size,
                              bytes_offset=byte_offset,
                              traceflags=traceflags)
        elif not passed_max_size and passed_num_inst:
            irsb = pyvex.IRSB(buff, addr, arch,
                              num_bytes=VEX_IRSB_MAX_SIZE,
                              num_inst=num_inst,
                              bytes_offset=byte_offset,
                              traceflags=traceflags)
        elif passed_max_size and passed_num_inst:
            irsb = pyvex.IRSB(buff, addr, arch,
                              num_bytes=size,
                              num_inst=num_inst,
                              bytes_offset=byte_offset,
                              traceflags=traceflags)
        else:
            irsb = pyvex.IRSB(buff, addr, arch,
                              num_bytes=size,
                              bytes_offset=byte_offset,
                              traceflags=traceflags)
    except pyvex.PyVEXError:
        l.debug("VEX translation error at %#x", addr)
        if isinstance(buff, str):
            l.debug('Using bytes: ' + buff)
        else:
            l.debug("Using bytes: " + str(pyvex.ffi.buffer(buff, size)).encode('hex'))
        # NOTE(review): `traceback` here shadows the stdlib module name for the
        # remainder of this scope. Python 2 three-expression raise, re-raising
        # with the original traceback.
        e_type, value, traceback = sys.exc_info()
        raise AngrTranslationError, ("Translation error", e_type, value), traceback
    if insn_bytes is None and self._project is not None:
        # If a hooked address falls inside the lifted region, truncate the
        # block just before the hook and re-lift.
        for stmt in irsb.statements:
            if stmt.tag != 'Ist_IMark' or stmt.addr == real_addr:
                continue
            if self._project.is_hooked(stmt.addr):
                size = stmt.addr - real_addr
                irsb = pyvex.IRSB(buff, addr, arch,
                                  num_bytes=size,
                                  bytes_offset=byte_offset,
                                  traceflags=traceflags)
                break
    irsb = self._post_process(irsb, arch)
    b = Block(buff, arch=arch, addr=addr, vex=irsb, thumb=thumb)
    if self._cache_enabled:
        self._block_cache[cache_key] = b
    return b
def _load_bytes(self, addr, max_size, state=None):
    """
    Fetch up to `max_size` bytes starting at `addr`.

    Prefers the state's memory when the project supports self-modifying code;
    otherwise reads from the loader, falling back to state memory if the
    address is not backed by any loaded object.

    :return: A (bytes, size) pair; size is capped at `max_size`.
    """
    buff = ""
    size = 0
    if self._project._support_selfmodifying_code and state:
        # Self-modifying code: the state's view of memory is authoritative.
        buff, size = self._bytes_from_state(state, addr, max_size)
    else:
        try:
            buff, size = self._project.loader.memory.read_bytes_c(addr)
        except KeyError:
            # Address not present in loader memory; try the state if we have one.
            if state:
                buff, size = self._bytes_from_state(state, addr, max_size)
    return buff, min(max_size, size)
@staticmethod
def _bytes_from_state(backup_state, addr, max_size):
    """
    Read up to `max_size` concrete bytes at `addr` from a state's memory.

    The scan stops at the first unmapped byte or at the first byte that
    cannot be concretized to a single value.

    :return: A (string, length) pair.
    """
    arr = []
    for i in range(addr, addr + max_size):
        if i in backup_state.memory:
            val = backup_state.memory.load(i, 1, inspect=False)
            try:
                # Concretize; a symbolic byte with no unique value ends the run.
                val = backup_state.se.exactly_n_int(val, 1)[0]
                val = chr(val)
            except simuvex.SimValueError:
                break
            arr.append(val)
        else:
            break
    buff = "".join(arr)
    size = len(buff)
    return buff, size
def _post_process(self, block, arch):
"""
Do some post-processing work here.
:param block:
:return:
"""
block.statements = [x for x in block.statements if x.tag != 'Ist_NoOp']
funcname = "_post_process_%s" % arch.name
if hasattr(self, funcname):
block = getattr(self, funcname)(block, arch)
return block
@staticmethod
def _post_process_ARM(block, arch):
    """
    ARM-specific post-processing: fix up the jumpkind of call-like sequences.

    If PC is moved to LR immediately before an Ijk_Boring exit, the block is
    really a call and its jumpkind is rewritten to Ijk_Call.
    """
    # Jumpkind
    if block.jumpkind == "Ijk_Boring":
        # If PC is moved to LR, then this should be an Ijk_Call
        #
        # Example:
        # MOV LR, PC
        # MOV PC, R8
        stmts = block.statements
        lr_store_id = None
        inst_ctr = 1
        # Walk the statements backwards looking for the most recent write to LR,
        # counting instruction marks so we know how far back it was.
        for i, stmt in reversed(list(enumerate(stmts))):
            if isinstance(stmt, pyvex.IRStmt.Put):
                if stmt.offset == arch.registers['lr'][0]:
                    lr_store_id = i
                    break
            if isinstance(stmt, pyvex.IRStmt.IMark):
                inst_ctr += 1
        # LR written in the second-to-last instruction => treat as a call.
        if lr_store_id is not None and inst_ctr == 2:
            block.jumpkind = "Ijk_Call"
    return block

# ARMEL and ARMHF share ARM's call-detection fixup.
_post_process_ARMEL = _post_process_ARM
_post_process_ARMHF = _post_process_ARM
@staticmethod
def _post_process_MIPS32(block, arch): #pylint:disable=unused-argument
    """
    MIPS32-specific post-processing: rewrite branches that are unconditional
    by construction into plain jumps.

    VEX translates `beq $zero, $zero, xxxx` into a guarded Exit whose guard
    compares two identical constants:

    15 | ------ IMark(0x401684, 4, 0) ------
    16 | t0 = CmpEQ32(0x00000000, 0x00000000)
    17 | PUT(128) = 0x00401688
    18 | ------ IMark(0x401688, 4, 0) ------
    19 | if (t0) goto {Ijk_Boring} 0x401684
    20 | PUT(128) = 0x0040168c
    21 | t4 = GET:I32(128)
    NEXT: PUT(128) = t4; Ijk_Boring

    When that pattern is found, the Exit statement is removed and the block's
    default exit is replaced with the branch target.
    """
    stmts = block.statements
    tmp_exit = None
    exit_stmt_idx = None
    dst = None
    for i, stmt in reversed(list(enumerate(stmts))):
        if tmp_exit is None:
            # Looking for the Exit statement
            if isinstance(stmt, pyvex.IRStmt.Exit) and \
                    isinstance(stmt.guard, pyvex.IRExpr.RdTmp):
                tmp_exit = stmt.guard.tmp
                dst = stmt.dst
                exit_stmt_idx = i
        else:
            # Looking for the WrTmp statement that defines the guard temp
            if isinstance(stmt, pyvex.IRStmt.WrTmp) and \
                    stmt.tmp == tmp_exit:
                if isinstance(stmt.data, pyvex.IRExpr.Binop) and \
                        stmt.data.op == 'Iop_CmpEQ32' and \
                        isinstance(stmt.data.child_expressions[0], pyvex.IRExpr.Const) and \
                        isinstance(stmt.data.child_expressions[1], pyvex.IRExpr.Const) and \
                        stmt.data.child_expressions[0].con.value == stmt.data.child_expressions[
                            1].con.value:
                    # Create a new IRConst carrying the branch target.
                    # BUG FIX: __new__ requires the class as its first argument;
                    # the original called pyvex.IRExpr.Const.__new__() with no
                    # arguments, which raises TypeError at runtime.
                    irconst = pyvex.IRExpr.Const.__new__(pyvex.IRExpr.Const)
                    irconst.con = dst
                    irconst.is_atomic = True
                    irconst.result_type = dst.type
                    irconst.tag = 'Iex_Const'
                    # Drop the now-redundant conditional Exit.
                    block.statements = block.statements[: exit_stmt_idx] + block.statements[exit_stmt_idx + 1:]
                    # Replace the default exit!
                    block.next = irconst
                else:
                    break
    return block
@staticmethod
def _find_source(statements, put_stmt_id):
    '''
    Execute the statements backwards and figure out where the value comes from.
    This is not a slicer. It only takes care of a small portion of statement types.

    :param statements:  The statement list of an IRSB.
    :param put_stmt_id: Index of a Put statement whose source is wanted.
    :return: A set of statement indices (Get statements) that the Put's value
             originates from, or None if the Put does not store a RdTmp.
    '''
    temps = set()
    src_stmt_ids = set()
    # Only Put-of-temporary statements are traceable here.
    if not isinstance(statements[put_stmt_id], pyvex.IRStmt.Put):
        return None
    if not isinstance(statements[put_stmt_id].data, pyvex.IRExpr.RdTmp):
        return None
    temps.add(statements[put_stmt_id].data.tmp)
    # Walk backwards, following temp-to-temp copies until a register Get is hit.
    # (Python 2 xrange.)
    for i in xrange(put_stmt_id, -1, -1):
        stmt = statements[i]
        if isinstance(stmt, pyvex.IRStmt.WrTmp):
            data = None
            if stmt.tmp in temps:
                data = stmt.data
            if isinstance(data, pyvex.IRExpr.RdTmp):
                temps.add(data.tmp)
            elif isinstance(data, pyvex.IRExpr.Get):
                src_stmt_ids.add(i)
                temps.remove(stmt.tmp)
    return src_stmt_ids
class Block(object):
    """
    A basic block: its raw bytes plus lazily-constructed VEX IR and Capstone
    disassembly views.
    """

    # Fallback byte count used when a block's true size is unknown (see __init__).
    BLOCK_MAX_SIZE = 4096

    __slots__ = ['bytes', '_vex', '_thumb', '_arch', '_capstone', 'addr', 'size', 'instructions', 'instruction_addrs']

    def __init__(self, byte_string, arch, addr=None, size=None, vex=None, thumb=None):
        """
        :param byte_string: The block's bytes, either as a str or as an object
                            usable with pyvex.ffi.buffer.
        :param arch:        The architecture of the block (recovered from `vex`
                            if omitted).
        :param addr:        The block's start address; recovered from the IRSB's
                            IMarks when omitted.
        :param size:        The block's size in bytes.
        :param vex:         An already-lifted pyvex.IRSB, if available.
        :param thumb:       Whether the block is ARM THUMB code.
        """
        self._vex = vex
        self._thumb = thumb
        self._arch = arch
        self._capstone = None
        self.addr = addr
        self.size = size
        self.instructions = None
        self.instruction_addrs = []
        self._parse_vex_info()
        if self.addr is None:
            l.warning('Lifted basic block with no IMarks!')
            self.addr = 0
        if type(byte_string) is str:
            if self.size is not None:
                self.bytes = byte_string[:self.size]
            else:
                self.bytes = byte_string
        else:
            # Convert bytestring to a str
            if self.size is not None:
                self.bytes = str(pyvex.ffi.buffer(byte_string, self.size))
            else:
                l.warning("Block size is unknown. Truncate it to BLOCK_MAX_SIZE")
                # BUG FIX: the original read
                #   str(pyvex.ffi.buffer(byte_string), Block.BLOCK_MAX_SIZE)
                # — a misplaced parenthesis that passed the size to str()
                # (a TypeError) instead of to the buffer.
                self.bytes = str(pyvex.ffi.buffer(byte_string, Block.BLOCK_MAX_SIZE))

    def _parse_vex_info(self):
        """Populate instruction count, arch, size, addr and instruction
        addresses from the cached IRSB, if one exists."""
        vex = self._vex
        if vex is not None:
            self.instructions = vex.instructions
            if self._arch is None:
                self._arch = vex.arch
            if self.size is None:
                self.size = vex.size
            # Each IMark statement marks the start of one machine instruction.
            for stmt in vex.statements:
                if stmt.tag != 'Ist_IMark':
                    continue
                if self.addr is None:
                    self.addr = stmt.addr + stmt.delta
                self.instruction_addrs.append(stmt.addr + stmt.delta)

    def __repr__(self):
        return '<Block for %#x, %d bytes>' % (self.addr, self.size)

    def __getstate__(self):
        # _capstone holds unpicklable capstone objects; rebuild lazily on demand.
        return dict((k, getattr(self, k)) for k in self.__slots__ if k not in ('_capstone', ))

    def __setstate__(self, data):
        for k, v in data.iteritems():
            setattr(self, k, v)

    def __hash__(self):
        return hash((type(self), self.addr, self.bytes))

    def __eq__(self, other):
        return type(self) is type(other) and \
               self.addr == other.addr and \
               self.bytes == other.bytes

    def __ne__(self, other):
        return not self == other

    def pp(self):
        """Pretty-print the Capstone disassembly of the block."""
        return self.capstone.pp()

    @property
    def vex(self):
        """The pyvex.IRSB for this block, lifted on first access."""
        if not self._vex:
            offset = 1 if self._thumb else 0
            self._vex = pyvex.IRSB(self.bytes, self.addr, self._arch, bytes_offset=offset)
            self._parse_vex_info()
        return self._vex

    @property
    def capstone(self):
        """The CapstoneBlock disassembly for this block, built on first access."""
        if self._capstone: return self._capstone
        # THUMB blocks need capstone's thumb-mode disassembler.
        cs = self._arch.capstone if not self._thumb else self._arch.capstone_thumb
        insns = []
        for cs_insn in cs.disasm(self.bytes, self.addr):
            insns.append(CapstoneInsn(cs_insn))
        block = CapstoneBlock(self.addr, insns, self._thumb, self._arch)
        self._capstone = block
        return block

    @property
    def codenode(self):
        """A BlockNode view of this block for the knowledge base."""
        return BlockNode(self.addr, self.size, bytestr=self.bytes)
class CapstoneBlock(object):
    """
    Holds the Capstone disassembly of a basic block: its address, its wrapped
    instructions, and the (thumb, arch) pair it was disassembled under.
    """

    __slots__ = [ 'addr', 'insns', 'thumb', 'arch' ]

    def __init__(self, addr, insns, thumb, arch):
        self.addr = addr
        self.insns = insns
        self.thumb = thumb
        self.arch = arch

    def pp(self):
        # Pretty-print all instructions to stdout (Python 2 print statement).
        print str(self)

    def __str__(self):
        # One instruction per line.
        return '\n'.join(map(str, self.insns))

    def __repr__(self):
        return '<CapstoneBlock for %#x>' % self.addr
class CapstoneInsn(object):
    """
    Thin proxy around a single capstone instruction; attribute access is
    forwarded to the wrapped instruction object.
    """

    def __init__(self, capstone_insn):
        self.insn = capstone_insn

    def __getattr__(self, item):
        # Keep our own printable representations instead of delegating them.
        if item in ('__str__', '__repr__'):
            return self.__getattribute__(item)
        wrapped = self.insn
        if hasattr(wrapped, item):
            return getattr(wrapped, item)
        raise AttributeError()

    def __str__(self):
        return "%#x:\t%s\t%s" % (self.address, self.mnemonic, self.op_str)

    def __repr__(self):
        return '<CapstoneInsn "%s" for %#x>' % (self.mnemonic, self.address)
from .errors import AngrMemoryError, AngrTranslationError, AngrLifterError
from .knowledge.codenode import BlockNode
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
38,473
|
MayfeelYang/angr
|
refs/heads/master
|
/tests/test_cle_gdb.py
|
import angr
import os
import nose
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'../../binaries/tests'))
binpath = os.path.join(test_location, "x86_64/test_gdb_plugin")
def check_addrs(p):
    """Assert that libc and the dynamic loader were rebased to the addresses
    recorded in the GDB memory-map dump."""
    libc = p.loader.shared_objects['libc.so.6']
    ld = p.loader.shared_objects['ld-linux-x86-64.so.2']
    nose.tools.assert_equal(libc.rebase_addr, 0x7ffff7a17000)
    nose.tools.assert_equal(ld.rebase_addr, 0x7ffff7ddc000)
def test_cle_gdb():
    """
    Test for `info proc mappings`
    """
    mappath = os.path.join(test_location, "../test_data/test_gdb_plugin/procmap")
    # Load the binary with library base addresses taken from the GDB procmap dump.
    p = angr.Project(binpath, load_options={"gdb_map":mappath})
    check_addrs(p)
def test_sharedlibs():
    """
    Test for info sharedlibrary
    """
    mappath = os.path.join(test_location, "../test_data/test_gdb_plugin/info_sharedlibs")
    # gdb_fix adjusts for `info sharedlibrary` reporting section addresses
    # rather than base addresses.
    p = angr.Project(binpath, load_options={"gdb_map":mappath, "gdb_fix":True})
    check_addrs(p)
if __name__ == "__main__":
test_cle_gdb()
test_sharedlibs()
|
{"/angr/surveyors/caller.py": ["/angr/surveyors/explorer.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/test_argv.py": ["/angr/__init__.py"], "/angr/path.py": ["/angr/errors.py", "/angr/path_history.py"], "/angr/simos.py": ["/angr/errors.py", "/angr/tablespecs.py"], "/tests/test_block_cache.py": ["/angr/__init__.py"], "/tests/test_signed_div.py": ["/angr/__init__.py"], "/angr/knowledge_base.py": ["/angr/knowledge/data.py"], "/angr/surveyors/executor.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_self_modifying_code.py": ["/angr/__init__.py"], "/tests/test_hook.py": ["/angr/__init__.py"], "/angr/surveyors/slicecutor.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/tests/broken_variableseekr.py": ["/angr/__init__.py"], "/tests/test_argc_sym.py": ["/angr/__init__.py"], "/tests/test_cfg_path.py": ["/angr/__init__.py"], "/angr/analyses/veritesting.py": ["/angr/errors.py", "/angr/analysis.py", "/angr/path_group.py", "/angr/path.py"], "/angr/analyses/congruency_check.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/knowledge/__init__.py": ["/angr/knowledge/data.py"], "/tests/test_scanf.py": ["/angr/__init__.py"], "/tests/test_vfg_path.py": ["/angr/__init__.py"], "/tests/test_serialization.py": ["/angr/__init__.py"], "/tests/test_mem_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/__init__.py": ["/angr/exploration_techniques/explorer.py", "/angr/exploration_techniques/dfs.py", "/angr/exploration_techniques/veritesting.py", "/angr/exploration_techniques/oppologist.py", "/angr/errors.py"], "/angr/surveyors/__init__.py": ["/angr/surveyors/explorer.py", "/angr/surveyors/executor.py", "/angr/surveyors/escaper.py", "/angr/surveyors/slicecutor.py", "/angr/surveyors/caller.py"], "/tests/test_static_hooker.py": ["/angr/__init__.py"], "/tests/test_cfgfast.py": ["/angr/__init__.py"], "/tests/test_str_funcs.py": ["/angr/__init__.py"], "/angr/exploration_techniques/oppologist.py": 
["/angr/errors.py", "/angr/exploration_techniques/__init__.py"], "/tests/test_argc.py": ["/angr/__init__.py"], "/angr/analyses/cdg.py": ["/angr/analysis.py"], "/angr/surveyors/escaper.py": ["/angr/surveyor.py", "/angr/surveyors/__init__.py"], "/tests/test_checkbyte.py": ["/angr/__init__.py"], "/angr/analyses/static_hooker.py": ["/angr/analysis.py", "/angr/errors.py"], "/angr/analyses/forward_analysis.py": ["/angr/errors.py"], "/angr/analyses/__init__.py": ["/angr/analyses/cdg.py", "/angr/analyses/ddg.py", "/angr/analyses/girlscout.py", "/angr/analyses/veritesting.py", "/angr/analyses/dfg.py", "/angr/analyses/congruency_check.py", "/angr/analyses/static_hooker.py"], "/angr/exploration_techniques/dfs.py": ["/angr/exploration_techniques/__init__.py"], "/angr/__init__.py": ["/angr/project.py", "/angr/regmap.py", "/angr/path.py", "/angr/errors.py", "/angr/surveyor.py", "/angr/analyses/__init__.py", "/angr/analysis.py", "/angr/tablespecs.py", "/angr/simos.py", "/angr/path_group.py", "/angr/surveyors/caller.py", "/angr/log.py"], "/tests/test_echo.py": ["/angr/__init__.py"], "/tests/test_explorer.py": ["/angr/__init__.py"], "/angr/exploration_techniques/veritesting.py": ["/angr/exploration_techniques/__init__.py"], "/angr/path_group.py": ["/angr/errors.py", "/angr/path.py", "/angr/__init__.py"], "/angr/analyses/dfg.py": ["/angr/analysis.py"], "/tests/test_cle_gdb.py": ["/angr/__init__.py"], "/angr/surveyor.py": ["/angr/errors.py", "/angr/path.py", "/angr/surveyors/__init__.py"], "/angr/surveyors/explorer.py": ["/angr/surveyor.py", "/angr/errors.py", "/angr/surveyors/__init__.py"], "/angr/analysis.py": ["/angr/errors.py"], "/angr/factory.py": ["/angr/surveyors/caller.py", "/angr/lifter.py", "/angr/errors.py", "/angr/path.py", "/angr/path_group.py", "/angr/knowledge/__init__.py"], "/tests/test_strtol.py": ["/angr/__init__.py"], "/angr/exploration_techniques/explorer.py": ["/angr/exploration_techniques/__init__.py"], "/tests/test_veritesting.py": ["/angr/__init__.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.