File size: 2,142 Bytes
391f8df
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
"""
处理参数,如参数翻转、参数替换等等
"""
import os
import torch
import random
import struct
import pandas as pd
from bitstring import BitArray
from concurrent.futures import ThreadPoolExecutor
import numpy as np
import math


def split_file(file_path, chunk_size=8):
    """
    Split a source file into a list of BitArray chunks.

    :param file_path: path to the source file
    :param chunk_size: chunk granularity in bits; the trailing chunk may be
        shorter when the file's bit length is not a multiple of chunk_size
    :return: list of BitArray chunks, in file order
    """
    # Read the whole file as one bit stream.
    bit_data = BitArray(filename=file_path)
    # Slice the stream into chunk_size-bit pieces.
    return [bit_data[i:i + chunk_size] for i in range(0, len(bit_data), chunk_size)]


def merge_file(output_file, chunks):
    """
    Merge a list of BitArray chunks back into a single file.

    :param output_file: destination file path
    :param chunks: iterable of BitArray chunks, in file order
    :return: None; the concatenated bits are written to output_file
    """
    combined = BitArray()
    for piece in chunks:
        # In-place concatenation keeps the original chunk order.
        combined += piece

    with open(output_file, 'wb') as out:
        combined.tofile(out)



def layer_low_n_bit_fLip(initParaPath, flipParaPath, bit_n, *layers):
    """
    XOR-flip the bits of selected layers' float32 parameters in a .pth file.

    Each selected layer's tensor is reinterpreted bit-for-bit as int32,
    XOR-ed with ``bit_n``, and reinterpreted back to float32 — a pure
    bit-level flip with no numeric rounding.

    NOTE(review): despite the name and original docstring ("flip the low
    n bits"), ``bit_n`` is applied directly as the XOR mask. To flip the
    lowest n bits of every element, pass ``(1 << n) - 1``. Kept as-is so
    existing callers that already pass masks are unaffected.

    :param initParaPath: path of the original parameter file (.pth)
    :param flipParaPath: path where the flipped parameters are saved
    :param bit_n: integer XOR mask applied to every element
    :param layers: names (keys of the loaded dict) of the layers to flip
    :return: None
    """
    para = torch.load(initParaPath)

    for layer in layers:
        tensor = para[layer].data
        # Skip 0-dim (scalar) entries: nothing meaningful to flip.
        if tensor.dim() < 1:
            continue
        # Only float32 can be reinterpreted as int32 (same 4-byte element
        # size); skip other dtypes instead of crashing on .view().
        if tensor.dtype != torch.float32:
            continue
        # Reinterpret raw bits as int32, XOR with the mask, then view the
        # result back as float32.
        flipped_bits = tensor.view(torch.int32) ^ bit_n
        para[layer].data = flipped_bits.view(torch.float32)

    torch.save(para, flipParaPath)








def _main():
    """Smoke-test entry point for running this module directly."""
    print("Test Done")


if __name__ == "__main__":
    _main()